lang
stringclasses
2 values
license
stringclasses
13 values
stderr
stringlengths
0
343
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
6
87.7k
new_contents
stringlengths
0
6.23M
new_file
stringlengths
3
311
old_contents
stringlengths
0
6.23M
message
stringlengths
6
9.1k
old_file
stringlengths
3
311
subject
stringlengths
0
4k
git_diff
stringlengths
0
6.31M
Java
apache-2.0
9a6407132011ce75a784739e86c00b6a009f548d
0
getlantern/lantern-common
package org.lantern; public interface Stats { long getUptime(); long getPeerCount(); long getPeerCountThisRun(); long getUpBytesThisRun(); long getDownBytesThisRun(); long getUpBytesThisRunForPeers(); long getUpBytesThisRunViaProxies(); long getUpBytesThisRunToPeers(); long getDownBytesThisRunForPeers(); long getDownBytesThisRunViaProxies(); long getDownBytesThisRunFromPeers(); long getUpBytesPerSecond(); long getDownBytesPerSecond(); long getUpBytesPerSecondForPeers(); long getUpBytesPerSecondViaProxies(); long getDownBytesPerSecondForPeers(); long getDownBytesPerSecondViaProxies(); long getDownBytesPerSecondFromPeers(); long getUpBytesPerSecondToPeers(); long getTotalBytesProxied(); long getDirectBytes(); int getTotalProxiedRequests(); int getDirectRequests(); boolean isUpnp(); boolean isNatpmp(); String getCountryCode(); String getVersion(); double getProcessCpuUsage(); double getSystemCpuUsage(); double getSystemLoadAverage(); double getMemoryUsageInBytes(); long getNumberOfOpenFileDescriptors(); }
src/main/java/org/lantern/Stats.java
package org.lantern; public interface Stats { long getUptime(); long getPeerCount(); long getPeerCountThisRun(); long getUpBytesThisRun(); long getDownBytesThisRun(); long getUpBytesThisRunForPeers(); long getUpBytesThisRunViaProxies(); long getUpBytesThisRunToPeers(); long getDownBytesThisRunForPeers(); long getDownBytesThisRunViaProxies(); long getDownBytesThisRunFromPeers(); long getUpBytesPerSecond(); long getDownBytesPerSecond(); long getUpBytesPerSecondForPeers(); long getUpBytesPerSecondViaProxies(); long getDownBytesPerSecondForPeers(); long getDownBytesPerSecondViaProxies(); long getDownBytesPerSecondFromPeers(); long getUpBytesPerSecondToPeers(); long getTotalBytesProxied(); long getDirectBytes(); int getTotalProxiedRequests(); int getDirectRequests(); boolean isUpnp(); boolean isNatpmp(); String getCountryCode(); String getVersion(); double getCpuUtilization(); double getLoadAverage(); double getMemoryUsageInBytes(); double getNumberOfOpenFileDescriptors(); }
Added Metric for tracking moving averages
src/main/java/org/lantern/Stats.java
Added Metric for tracking moving averages
<ide><path>rc/main/java/org/lantern/Stats.java <ide> <ide> String getVersion(); <ide> <del> double getCpuUtilization(); <add> double getProcessCpuUsage(); <ide> <del> double getLoadAverage(); <add> double getSystemCpuUsage(); <add> <add> double getSystemLoadAverage(); <ide> <ide> double getMemoryUsageInBytes(); <ide> <del> double getNumberOfOpenFileDescriptors(); <add> long getNumberOfOpenFileDescriptors(); <ide> <ide> }
JavaScript
mit
b40aa676dfc5b82588d7c5dbd8cf21650c21edbe
0
AndreaZain/dl-module,danliris/dl-module,kristika/dl-module,baguswidypriyono/dl-module,indriHutabalian/dl-module
'use strict' var ObjectId = require("mongodb").ObjectId; require("mongodb-toolkit"); var DLModels = require('dl-models'); var map = DLModels.map; var ProductionOrder = DLModels.sales.ProductionOrder; var ProductionOrderDetail = DLModels.sales.ProductionOrderDetail; var ProductionOrderLampStandard = DLModels.sales.ProductionOrderLampStandard; var LampStandardManager = require('../master/lamp-standard-manager'); var BuyerManager = require('../master/buyer-manager'); var UomManager = require('../master/uom-manager'); var ProductManager = require('../master/product-manager'); var ProcessTypeManager = require('../master/process-type-manager'); var OrderTypeManager = require('../master/order-type-manager'); var ColorTypeManager = require('../master/color-type-manager'); var FinishTypeManager = require('../master/finish-type-manager'); var StandardTestManager = require('../master/standard-test-manager'); var MaterialConstructionManager = require('../master/material-construction-manager'); var YarnMaterialManager = require('../master/yarn-material-manager'); var AccountManager = require('../auth/account-manager'); var BaseManager = require('module-toolkit').BaseManager; var i18n = require('dl-i18n'); var generateCode = require("../../utils/code-generator"); var assert = require('assert'); module.exports = class ProductionOrderManager extends BaseManager { constructor(db, user) { super(db, user); this.collection = this.db.collection(map.sales.collection.ProductionOrder); this.dailyOperationCollection = this.db.collection(map.production.finishingPrinting.collection.DailyOperation); this.fabricQualityControlCollection = this.db.use(map.production.finishingPrinting.qualityControl.defect.collection.FabricQualityControl); this.LampStandardManager = new LampStandardManager(db, user); this.BuyerManager = new BuyerManager(db, user); this.UomManager = new UomManager(db, user); this.ProductManager = new ProductManager(db, user); this.ProcessTypeManager = new ProcessTypeManager(db, 
user); this.ColorTypeManager = new ColorTypeManager(db, user); this.OrderTypeManager = new OrderTypeManager(db, user); this.MaterialConstructionManager = new MaterialConstructionManager(db, user); this.YarnMaterialManager = new YarnMaterialManager(db, user); this.FinishTypeManager = new FinishTypeManager(db, user); this.StandardTestManager = new StandardTestManager(db, user); this.AccountManager = new AccountManager(db, user); } _getQuery(paging) { var deletedFilter = { _deleted: false }, keywordFilter = {}; var query = {}; if (paging.keyword) { var regex = new RegExp(paging.keyword, "i"); var filterSalesContract = { 'salesContractNo': { '$regex': regex } }; var filterOrderNo = { 'orderNo': { '$regex': regex } }; var filterBuyerName = { 'buyer.name': { '$regex': regex } }; var filterBuyerType = { 'buyer.type': { '$regex': regex } }; var filterProcessType = { 'processType.name': { '$regex': regex } }; keywordFilter = { '$or': [filterSalesContract, filterOrderNo, filterBuyerName, filterBuyerType, filterProcessType] }; } query = { '$and': [deletedFilter, paging.filter, keywordFilter] } return query; } _beforeInsert(productionOrder) { productionOrder.orderNo = productionOrder.orderNo === "" ? generateCode() : productionOrder.orderNo; productionOrder._createdDate = new Date(); return Promise.resolve(productionOrder); } _validate(productionOrder) { var errors = {}; var valid = productionOrder; var getProductionOrder = this.collection.singleOrDefault({ _id: { '$ne': new ObjectId(valid._id) }, orderNo: valid.orderNo }); var getBuyer = ObjectId.isValid(valid.buyerId) ? this.BuyerManager.getSingleByIdOrDefault(valid.buyerId) : Promise.resolve(null); var getUom = valid.uom && ObjectId.isValid(valid.uomId) ? this.UomManager.getSingleByIdOrDefault(valid.uomId) : Promise.resolve(null); var getProduct = ObjectId.isValid(valid.materialId) ? 
this.ProductManager.getSingleByIdOrDefault(valid.materialId) : Promise.resolve(null); var getProcessType = ObjectId.isValid(valid.processTypeId) ? this.ProcessTypeManager.getSingleByIdOrDefault(valid.processTypeId) : Promise.resolve(null); var getOrderType = ObjectId.isValid(valid.orderTypeId) ? this.OrderTypeManager.getSingleByIdOrDefault(valid.orderTypeId) : Promise.resolve(null); var getFinishType = ObjectId.isValid(valid.finishTypeId) ? this.FinishTypeManager.getSingleByIdOrDefault(valid.finishTypeId) : Promise.resolve(null); var getYarnMaterial = ObjectId.isValid(valid.yarnMaterialId) ? this.YarnMaterialManager.getSingleByIdOrDefault(valid.yarnMaterialId) : Promise.resolve(null); var getStandardTest = ObjectId.isValid(valid.standardTestId) ? this.StandardTestManager.getSingleByIdOrDefault(valid.standardTestId) : Promise.resolve(null); var getMaterialConstruction = ObjectId.isValid(valid.materialConstructionId) ? this.MaterialConstructionManager.getSingleByIdOrDefault(valid.materialConstructionId) : Promise.resolve(null); var getAccount = ObjectId.isValid(valid.accountId) ? this.AccountManager.getSingleByIdOrDefault(valid.accountId) : Promise.resolve(null); valid.details = valid.details || []; var getColorTypes = []; for (var detail of valid.details) { if (ObjectId.isValid(detail.colorTypeId)) { var color = ObjectId.isValid(detail.colorTypeId) ? this.ColorTypeManager.getSingleByIdOrDefault(detail.colorTypeId) : Promise.resolve(null); getColorTypes.push(color); } } valid.lampStandards = valid.lampStandards || []; var getLampStandards = []; for (var lamp of valid.lampStandards) { if (ObjectId.isValid(lamp.lampStandardId)) { var lamps = ObjectId.isValid(lamp.lampStandardId) ? 
this.LampStandardManager.getSingleByIdOrDefault(lamp.lampStandardId) : Promise.resolve(null); getLampStandards.push(lamps); } } return Promise.all([getProductionOrder, getBuyer, getUom, getProduct, getProcessType, getOrderType, getFinishType, getYarnMaterial, getStandardTest, getMaterialConstruction, getAccount].concat(getColorTypes, getLampStandards)) .then(results => { var _productionOrder = results[0]; var _buyer = results[1]; var _uom = results[2]; var _material = results[3]; var _process = results[4]; var _order = results[5]; var _finish = results[6]; var _yarn = results[7]; var _standard = results[8]; var _construction = results[9]; var _account = results[10]; var _colors = results.slice(11, 11 + getColorTypes.length); var _lampStandards = results.slice(11 + getColorTypes.length, results.length); if (_productionOrder) { errors["orderNo"] = i18n.__("ProductionOrder.orderNo.isExist:%s is Exist", i18n.__("Product.orderNo._:orderNo")); //"orderNo sudah ada"; } if (valid.uom) { if (!_uom) errors["uom"] = i18n.__("ProductionOrder.uom.isRequired:%s is required", i18n.__("Product.uom._:Uom")); //"Satuan tidak boleh kosong"; } else errors["uom"] = i18n.__("ProductionOrder.uom.isRequired:%s is required", i18n.__("Product.uom._:Uom")); //"Satuan tidak boleh kosong"; if (!valid.salesContractNo || valid.salesContractNo === '') { errors["salesContractNo"] = i18n.__("ProductionOrder.salesContractNo.isRequired:%s is required", i18n.__("ProductionOrder.salesContractNo._:SalesContractNo")); //"salesContractNo tidak boleh kosong"; } if (!_material) errors["material"] = i18n.__("ProductionOrder.material.isRequired:%s is not exists", i18n.__("ProductionOrder.material._:Material")); //"material tidak boleh kosong"; if (!_process) errors["processType"] = i18n.__("ProductionOrder.processType.isRequired:%s is not exists", i18n.__("ProductionOrder.processType._:ProcessType")); //"processType tidak boleh kosong"; if (!_order) errors["orderType"] = 
i18n.__("ProductionOrder.orderType.isRequired:%s is not exists", i18n.__("ProductionOrder.orderType._:OrderType")); //"orderType tidak boleh kosong"; if (!_yarn) errors["yarnMaterial"] = i18n.__("ProductionOrder.yarnMaterial.isRequired:%s is not exists", i18n.__("ProductionOrder.yarnMaterial._:YarnMaterial")); //"yarnMaterial tidak boleh kosong"; if (!_construction) errors["materialConstruction"] = i18n.__("ProductionOrder.materialConstruction.isRequired:%s is not exists", i18n.__("ProductionOrder.materialConstruction._:MaterialConstruction")); //"materialConstruction tidak boleh kosong"; if (!_finish) errors["finishType"] = i18n.__("ProductionOrder.finishType.isRequired:%s is not exists", i18n.__("ProductionOrder.finishType._:FinishType")); //"finishType tidak boleh kosong"; if (!_standard) errors["standardTest"] = i18n.__("ProductionOrder.standardTest.isRequired:%s is not exists", i18n.__("ProductionOrder.standardTest._:StandardTest")); //"standardTest tidak boleh kosong"; if (!_account) { errors["account"] = i18n.__("ProductionOrder.account.isRequired:%s is not exists", i18n.__("ProductionOrder.account._:Account")); //"account tidak boleh kosong"; } if (!valid.packingInstruction || valid.packingInstruction === '') { errors["packingInstruction"] = i18n.__("ProductionOrder.packingInstruction.isRequired:%s is required", i18n.__("ProductionOrder.packingInstruction._:PackingInstruction")); //"PackingInstruction tidak boleh kosong"; } if (!valid.materialOrigin || valid.materialOrigin === '') { errors["materialOrigin"] = i18n.__("ProductionOrder.materialOrigin.isRequired:%s is required", i18n.__("ProductionOrder.materialOrigin._:MaterialOrigin")); //"materialOrigin tidak boleh kosong"; } if (!valid.finishWidth || valid.finishWidth === '') { errors["finishWidth"] = i18n.__("ProductionOrder.finishWidth.isRequired:%s is required", i18n.__("ProductionOrder.finishWidth._:FinishWidth")); //"finishWidth tidak boleh kosong"; } if (!valid.sample || valid.sample === '') { 
errors["sample"] = i18n.__("ProductionOrder.sample.isRequired:%s is required", i18n.__("ProductionOrder.sample._:Sample")); //"sample tidak boleh kosong"; } if (!valid.handlingStandard || valid.handlingStandard === '') { errors["handlingStandard"] = i18n.__("ProductionOrder.handlingStandard.isRequired:%s is required", i18n.__("ProductionOrder.handlingStandard._:HandlingStandard")); //"handlingStandard tidak boleh kosong"; } if (!valid.shrinkageStandard || valid.shrinkageStandard === '') { errors["shrinkageStandard"] = i18n.__("ProductionOrder.shrinkageStandard.isRequired:%s is required", i18n.__("ProductionOrder.shrinkageStandard._:ShrinkageStandard")); //"shrinkageStandard tidak boleh kosong"; } if (!valid.deliveryDate || valid.deliveryDate === "") { errors["deliveryDate"] = i18n.__("ProductionOrder.deliveryDate.isRequired:%s is required", i18n.__("ProductionOrder.deliveryDate._:deliveryDate")); //"deliveryDate tidak boleh kosong"; } // else{ // valid.deliveryDate=new Date(valid.deliveryDate); // var today=new Date(); // today.setHours(0,0,0,0); // if(today>valid.deliveryDate){ // errors["deliveryDate"] = i18n.__("ProductionOrder.deliveryDate.shouldNot:%s should not be less than today's date", i18n.__("ProductionOrder.deliveryDate._:deliveryDate")); //"deliveryDate tidak boleh kurang dari tanggal hari ini"; // } // } if (_order) { if (_order.name.trim().toLowerCase() == "printing") { if (!valid.RUN || valid.RUN == "") { errors["RUN"] = i18n.__("ProductionOrder.RUN.isRequired:%s is required", i18n.__("ProductionOrder.RUN._:RUN")); //"RUN tidak boleh kosong"; } if (valid.RUN && valid.RUN != "Tanpa RUN") { if (!valid.RUNWidth || valid.RUNWidth.length <= 0) { errors["RUNWidth"] = i18n.__("ProductionOrder.RUNWidth.isRequired:%s is required", i18n.__("ProductionOrder.RUNWidth._:RUNWidth")); //"RUNWidth tidak boleh kosong"; } if (valid.RUNWidth.length > 0) { for (var r = 0; r < valid.RUNWidth.length; r++) { if (valid.RUNWidth[r] <= 0) { errors["RUNWidth"] = 
i18n.__("ProductionOrder.RUNWidth.shouldNot:%s should not be less than or equal zero", i18n.__("ProductionOrder.RUNWidth._:RUNWidth")); //"RUNWidth tidak boleh nol"; break; } } } } if (!valid.designNumber || valid.designNumber == "") { errors["designNumber"] = i18n.__("ProductionOrder.designNumber.isRequired:%s is required", i18n.__("ProductionOrder.designNumber._:DesignNumber")); //"designNumber tidak boleh kosong"; } if (!valid.designCode || valid.designCode == "") { errors["designCode"] = i18n.__("ProductionOrder.designCode.isRequired:%s is required", i18n.__("ProductionOrder.designCode._:DesignCode")); //"designCode tidak boleh kosong"; } } } if (!_buyer) errors["buyer"] = i18n.__("ProductionOrder.buyer.isRequired:%s is not exists", i18n.__("ProductionOrder.buyer._:Buyer")); //"Buyer tidak boleh kosong"; if (!valid.orderQuantity || valid.orderQuantity === 0) errors["orderQuantity"] = i18n.__("ProductionOrder.orderQuantity.isRequired:%s is required", i18n.__("ProductionOrder.orderQuantity._:OrderQuantity")); //"orderQuantity tidak boleh kosong"; else { var totalqty = 0; if (valid.details.length > 0) { for (var i of valid.details) { totalqty += i.quantity; } } if (valid.orderQuantity != totalqty) { errors["orderQuantity"] = i18n.__("ProductionOrder.orderQuantity.shouldNot:%s should equal SUM quantity in details", i18n.__("ProductionOrder.orderQuantity._:OrderQuantity")); //"orderQuantity tidak boleh berbeda dari total jumlah detail"; } } if (!valid.shippingQuantityTolerance || valid.shippingQuantityTolerance === 0) errors["shippingQuantityTolerance"] = i18n.__("ProductionOrder.shippingQuantityTolerance.isRequired:%s is required", i18n.__("ProductionOrder.shippingQuantityTolerance._:ShippingQuantityTolerance")); //"shippingQuantityTolerance tidak boleh kosong"; else if (valid.shippingQuantityTolerance > 100) { errors["shippingQuantityTolerance"] = i18n.__("ProductionOrder.shippingQuantityTolerance.shouldNot:%s should not more than 100", 
i18n.__("ProductionOrder.shippingQuantityTolerance._:ShippingQuantityTolerance")); //"shippingQuantityTolerance tidak boleh lebih dari 100"; } if (!valid.materialWidth || valid.materialWidth === "") errors["materialWidth"] = i18n.__("ProductionOrder.materialWidth.isRequired:%s is required", i18n.__("ProductionOrder.materialWidth._:MaterialWidth")); //"materialWidth tidak boleh kosong"; valid.lampStandards = valid.lampStandards || []; if (valid.lampStandards && valid.lampStandards.length <= 0) { errors["lampStandards"] = i18n.__("ProductionOrder.lampStandards.isRequired:%s is required", i18n.__("ProductionOrder.lampStandards._:LampStandards")); //"Harus ada minimal 1 lampStandard"; } else if (valid.lampStandards.length > 0) { var lampErrors = []; for (var lamp of valid.lampStandards) { var lampError = {}; if (!_lampStandards || _lampStandards.length <= 0) { lampError["lampStandards"] = i18n.__("ProductionOrder.lampStandards.lampStandard.isRequired:%s is not exists", i18n.__("ProductionOrder.lampStandards.lampStandard._:LampStandard")); //"lampStandard tidak boleh kosong"; } if (!lamp.lampStandard._id) { lampError["lampStandards"] = i18n.__("ProductionOrder.lampStandards.lampStandard.isRequired:%s is not exists", i18n.__("ProductionOrder.lampStandards.lampStandard._:LampStandard")); //"lampStandard tidak boleh kosong"; } if (Object.getOwnPropertyNames(lampError).length > 0) lampErrors.push(lampError); } if (lampErrors.length > 0) errors.lampStandards = lampErrors; } valid.details = valid.details || []; if (valid.details && valid.details.length <= 0) { errors["details"] = i18n.__("ProductionOrder.details.isRequired:%s is required", i18n.__("ProductionOrder.details._:Details")); //"Harus ada minimal 1 detail"; } else if (valid.details.length > 0) { var detailErrors = []; var totalqty = 0; for (var i of valid.details) { totalqty += i.quantity; } for (var detail of valid.details) { var detailError = {}; detail.code = generateCode(); if (!detail.colorRequest || 
detail.colorRequest == "") detailError["colorRequest"] = i18n.__("ProductionOrder.details.colorRequest.isRequired:%s is required", i18n.__("ProductionOrder.details.colorRequest._:ColorRequest")); //"colorRequest tidak boleh kosong"; if (detail.quantity <= 0) detailError["quantity"] = i18n.__("ProductionOrder.details.quantity.isRequired:%s is required", i18n.__("ProductionOrder.details.quantity._:Quantity")); //Jumlah barang tidak boleh kosong"; if (valid.orderQuantity != totalqty) detailError["total"] = i18n.__("ProductionOrder.details.quantity.shouldNot:%s Total should equal Order Quantity", i18n.__("ProductionOrder.details.quantity._:Quantity")); //Jumlah barang tidak boleh berbeda dari jumlah order"; if (!_uom) detailError["uom"] = i18n.__("ProductionOrder.details.uom.isRequired:%s is not exists", i18n.__("ProductionOrder.details.uom._:Uom")); //"satuan tidak boleh kosong"; if (_uom) { detail.uomId = new ObjectId(_uom._id); } if (!detail.colorTemplate || detail.colorTemplate == "") detailError["colorTemplate"] = i18n.__("ProductionOrder.details.colorTemplate.isRequired:%s is required", i18n.__("ProductionOrder.details.colorTemplate._:ColorTemplate")); //"colorTemplate tidak boleh kosong"; } if (_order) { if (_order.name.toLowerCase() == "yarn dyed" || _order.name.toLowerCase() == "printing") { _colors = {}; } else { if (!_colors) detailError["colorType"] = i18n.__("ProductionOrder.details.colorType.isRequired:%s is required", i18n.__("ProductionOrder.details.colorType._:ColorType")); //"colorType tidak boleh kosong"; else if (!detail.colorType) { detailError["colorType"] = i18n.__("ProductionOrder.details.colorType.isRequired:%s is required", i18n.__("ProductionOrder.details.colorType._:ColorType")); //"colorType tidak boleh kosong"; } } if (Object.getOwnPropertyNames(detailError).length > 0) detailErrors.push(detailError); } if (detailErrors.length > 0) errors.details = detailErrors; } if (!valid.orderNo || valid.orderNo === '') { valid.orderNo = 
generateCode(); } if (_buyer) { valid.buyerId = new ObjectId(_buyer._id); } if (_uom) { valid.uomId = new ObjectId(_uom._id); } if (_process) { valid.processTypeId = new ObjectId(_process._id); } if (_account) { valid.accountId = new ObjectId(_account._id); } if (valid.lampStandards.length > 0) { for (var lamp of valid.lampStandards) { for (var _lampStandard of _lampStandards) { if (_lampStandard) { if (lamp.lampStandardId.toString() === _lampStandard._id.toString()) { lamp.lampStandardId = _lampStandard._id; lamp.lampStandard = _lampStandard; } } } } } if (_order) { valid.orderTypeId = new ObjectId(_order._id); if (_order.name.toLowerCase() != "printing") { valid.RUN = ""; valid.RUNWidth = []; valid.designCode = ""; valid.designNumber = ""; valid.articleFabricEdge = ""; } if (_order.name.toLowerCase() == "yarn dyed" || _order.name.toLowerCase() == "printing") { for (var detail of valid.details) { detail.colorTypeId = null; detail.colorType = null; } } else { for (var detail of valid.details) { if (detail.colorType) { for (var _color of _colors) { if (_color) { if (detail.colorTypeId.toString() === _color._id.toString()) { detail.colorTypeId = _color._id; detail.colorType = _color; } } } } } } } if (_material) { valid.material = _material; valid.materialId = new ObjectId(_material._id); } if (_finish) { valid.finishType = _finish; valid.finishTypeId = new ObjectId(_finish._id); } if (_yarn) { valid.yarnMaterial = _yarn; valid.yarnMaterialId = new ObjectId(_yarn._id); } if (_standard) { valid.standardTest = _standard; valid.standardTestId = _standard._id; } if (_construction) { valid.materialConstruction = _construction; valid.materialConstructionId = _construction._id; } valid.deliveryDate = new Date(valid.deliveryDate); if (Object.getOwnPropertyNames(errors).length > 0) { var ValidationError = require('module-toolkit').ValidationError; return Promise.reject(new ValidationError('data does not pass validation', errors)); } if (!valid.stamp) { valid = new 
ProductionOrder(valid); } valid.stamp(this.user.username, "manager"); return Promise.resolve(valid); }); } _createIndexes() { var dateIndex = { name: `ix_${map.sales.collection.ProductionOrder}__updatedDate`, key: { _updatedDate: -1 } } var noIndex = { name: `ix_${map.sales.collection.ProductionOrder}_orderNo`, key: { orderNo: 1 }, unique: true } return this.collection.createIndexes([dateIndex, noIndex]); } pdf(id) { return new Promise((resolve, reject) => { this.getSingleById(id) .then(productionOrder => { var getDefinition = require("../../pdf/definitions/production-order"); var definition = getDefinition(productionOrder); var generatePdf = require("../../pdf/pdf-generator"); generatePdf(definition) .then(binary => { resolve(binary); }) .catch(e => { reject(e); }); }) .catch(e => { reject(e); }); }); } getSingleProductionOrderDetail(detailCode) { return new Promise((resolve, reject) => { var query = { "details": { "$elemMatch": { "code": detailCode } } }; this.collection.singleOrDefault(query).then((result) => { var dataReturn = {}; if (result) { for (var detail of result.details) { if (detailCode === detail.code) dataReturn = new ProductionOrderDetail(detail); } } resolve(dataReturn); }); }); } getReport(query) { return new Promise((resolve, reject) => { if (!query.size) { query.size = 20; } if (!query.page) { query.page = 1; } var _page = parseInt(query.page); var _size = parseInt(query.size); var qry = Object.assign({}); var filter = query.filter || {}; if (filter.salesContractNo) { Object.assign(qry, { "salesContractNo": { "$regex": (new RegExp(filter.salesContractNo, "i")) } }); } if (filter.orderNo) { Object.assign(qry, { "orderNo": { "$regex": (new RegExp(filter.orderNo, "i")) } }); } if (filter.orderTypeId) { Object.assign(qry, { "orderTypeId": (new ObjectId(filter.orderTypeId)) }); } if (filter.processTypeId) { Object.assign(qry, { "processTypeId": (new ObjectId(filter.processTypeId)) }); } if (filter.buyerId) { Object.assign(qry, { "buyerId": (new 
ObjectId(filter.buyerId)) }); } if (filter.accountId) { Object.assign(qry, { "accountId": (new ObjectId(filter.accountId)) }); } if (filter.sdate && filter.edate) { Object.assign(qry, { "_createdDate": { "$gte": new Date(`${filter.sdate} 00:00:00`), "$lte": new Date(`${filter.edate} 23:59:59`) } }); } qry = Object.assign(qry, { _deleted: false }); var getPrdOrder = []; getPrdOrder.push(this.collection .aggregate([ { $unwind: "$details" }, { $match: qry }, { $group: { _id: null, count: { $sum: 1 } } } ]) .toArray()); if ((query.accept || '').toString().indexOf("application/xls") < 0) { getPrdOrder.push(this.collection .aggregate([ { $unwind: "$details" }, { $match: qry }, { $project: { "salesContractNo": 1, "_createdDate": 1, "orderNo": 1, "orderType": "$orderType.name", "processType": "$processType.name", "buyer": "$buyer.name", "buyerType": "$buyer.type", "orderQuantity": "$orderQuantity", "uom": "$uom.unit", "colorCode": "$details.code", "colorTemplate": "$details.colorTemplate", "colorRequest": "$details.colorRequest", "colorType": "$details.colorType.name", "quantity": "$details.quantity", "uomDetail": "$details.uom.unit", "deliveryDate": "$deliveryDate", "firstname": "$account.profile.firstname", "lastname": "$account.profile.lastname" } }, { $sort: { "_createdDate": -1 } }, { $skip: ((_page - 1) * _size) }, { $limit: (_size) } ]) .toArray()); } else { getPrdOrder.push(this.collection .aggregate([ { $unwind: "$details" }, { $match: qry }, { $project: { "salesContractNo": 1, "_createdDate": 1, "orderNo": 1, "orderType": "$orderType.name", "processType": "$processType.name", "buyer": "$buyer.name", "buyerType": "$buyer.type", "orderQuantity": "$orderQuantity", "uom": "$uom.unit", "colorCode": "$details.code", "colorTemplate": "$details.colorTemplate", "colorRequest": "$details.colorRequest", "colorType": "$details.colorType.name", "quantity": "$details.quantity", "uomDetail": "$details.uom.unit", "deliveryDate": "$deliveryDate", "firstname": 
"$account.profile.firstname", "lastname": "$account.profile.lastname" } }, { $sort: { "_createdDate": -1 } } ]) .toArray()); } Promise.all(getPrdOrder).then(result => { var resCount = result[0]; var count = resCount.length > 0 ? resCount[0].count : 0; var prodOrders = result[1]; prodOrders = [].concat.apply([], prodOrders); var jobsGetDailyOperation = []; for (var prodOrder of prodOrders) { jobsGetDailyOperation.push(this.dailyOperationCollection.aggregate([ { $match: { "type": "input", "_deleted": false, "kanban.selectedProductionOrderDetail.code": prodOrder.colorCode, "kanban.productionOrder.orderNo": prodOrder.orderNo } }, { $project: { "orderNo": "$kanban.productionOrder.orderNo", "kanbanCode": "$kanban.code", "colorCode": "$kanban.selectedProductionOrderDetail.code", "input": 1 } } ]).toArray()); } if (jobsGetDailyOperation.length == 0) { jobsGetDailyOperation.push(Promise.resolve(null)) } Promise.all(jobsGetDailyOperation).then(dailyOperations => {//Get DailyOperation dailyOperations = [].concat.apply([], dailyOperations); dailyOperations = this.cleanUp(dailyOperations); var jobsGetQC = []; for (var prodOrder of prodOrders) { var _dailyOperations = dailyOperations.filter(function (dailyOperation) { return dailyOperation.orderNo === prodOrder.orderNo && dailyOperation.colorCode === prodOrder.colorCode; }) var filters = ["orderNo", "colorCode", "kanbanCode"]; _dailyOperations = this.removeDuplicates(_dailyOperations, filters); if (_dailyOperations.length > 0) { var kanbanCodes = []; _dailyOperations.some(function (dailyOperation, idx) { kanbanCodes.push(dailyOperation.kanbanCode); }); var sum = _dailyOperations .map(dailyOperation => dailyOperation.input) .reduce(function (prev, curr, index, arr) { return prev + curr; }, 0); for (var dailyOperation of _dailyOperations) { jobsGetQC.push(this.fabricQualityControlCollection.aggregate([ { $match: { "_deleted": false, "productionOrderNo": dailyOperation.orderNo, "kanbanCode": dailyOperation.kanbanCode } }, { 
$project: { "productionOrderNo": 1, "kanbanCode": 1, "orderQuantityQC": { $sum: "$fabricGradeTests.initLength" } } } ]).toArray()); } prodOrder.input = sum; prodOrder.kanbanCodes = kanbanCodes; } else { prodOrder.input = 0; prodOrder.kanbanCodes = []; } } if (jobsGetQC.length == 0) { jobsGetQC.push(Promise.resolve(null)) } Promise.all(jobsGetQC).then(qualityControls => {//Get QC qualityControls = [].concat.apply([], qualityControls); qualityControls = this.cleanUp(qualityControls); for (var prodOrder of prodOrders) { var _qualityControls = qualityControls.filter(function (qualityControl) { return qualityControl.productionOrderNo === prodOrder.orderNo && prodOrder.kanbanCodes.includes(qualityControl.kanbanCode); }) // filters = ["productionOrderNo", "kanbanCode"]; // _qualityControls = this.removeDuplicates(_qualityControls, filters); var _orderQuantityQC = 0 if (_qualityControls.length > 0) { _orderQuantityQC = _qualityControls .map(qualityControl => qualityControl.orderQuantityQC) .reduce(function (prev, curr, index, arr) { return prev + curr; }, 0); } prodOrder.orderQuantityQC = _orderQuantityQC; if (prodOrder.orderQuantityQC > 0) { prodOrder.status = "Sudah dalam pemeriksaan kain"; } else if (prodOrder.input > 0) { prodOrder.status = "Sudah dalam produksi"; } else if (prodOrder.input == 0) { prodOrder.status = "Belum dalam produksi"; } prodOrder.detail = `${prodOrder.quantity} di spp\n${prodOrder.input} di produksi\n${prodOrder.orderQuantityQC} di pemeriksaan`; } var results = { data: prodOrders, count: prodOrders.length, size: 20, total: count, page: (_page * _size) / _size }; resolve(results); }) }) }) }); } getDetailReport(salesContractNo) { return new Promise((resolve, reject) => { var qry = Object.assign({}); var data = {} if (salesContractNo) { Object.assign(qry, { "salesContractNo": { "$regex": (new RegExp(salesContractNo, "i")) } }); } qry = Object.assign(qry, { _deleted: false }); this.collection .aggregate([ { $unwind: "$details" }, { $match: qry }, { 
$group: { "_id": "$orderNo", "salesContractNo": { "$first": "$salesContractNo" }, "orderQuantity": { "$first": "$orderQuantity" }, "uom": { "$first": "$uom.unit" }, "details": { "$push": { "colorTemplate": "$details.colorTemplate", "colorCode": "$details.code", "colorRequest": "$details.colorRequest", "colorType": "$details.colorType.name", "quantity": "$details.quantity", "uomDetail": "$details.uom.unit", } } } }, { $sort: { "_createdDate": -1 } } ]) .toArray().then(prodOrders => { prodOrders = [].concat.apply([], prodOrders); Object.assign(data, { productionOrders: prodOrders }); var _prodOrders = prodOrders.map((prodOrder) => { return prodOrder.details.map((detail) => { return { salesContractNo: prodOrder.salesContractNo, orderNo: prodOrder._id, colorCode: detail.colorCode } }) }) _prodOrders = [].concat.apply([], _prodOrders); var filters = ["orderNo"]; _prodOrders = this.removeDuplicates(_prodOrders, filters); var jobsGetDailyOperation = []; for (var prodOrder of _prodOrders) { jobsGetDailyOperation.push(this.dailyOperationCollection.aggregate([ { $unwind: "$kanban.instruction.steps" }, { $match: { "type": "input", "_deleted": false, // "kanban.selectedProductionOrderDetail.code": prodOrder.colorCode, "kanban.productionOrder.orderNo": prodOrder.orderNo } }, { $project: { "orderNo": "$kanban.productionOrder.orderNo", "kanbanCode": "$kanban.code", "machine": "$machine.name", "color": "$kanban.selectedProductionOrderDetail.colorRequest", "step": "$kanban.instruction.steps.process", "cmp": { "$eq": ["$stepId", "$kanban.instruction.steps._id"] }, "qty": "$input" } }, { $match: { "cmp": true } }, { $group: { "_id": { "orderNo": "$orderNo", "machine": "$machine", "step": "$step", "color": "$color" }, "kanbanCode": { $first: "$kanbanCode" }, "qty": { "$sum": "$qty" } } }, { $sort:{"_createdDate":1} } ]).toArray()); } if (jobsGetDailyOperation.length == 0) { jobsGetDailyOperation.push(Promise.resolve(null)) } Promise.all(jobsGetDailyOperation).then(dailyOperations => { 
dailyOperations = [].concat.apply([], dailyOperations); if (dailyOperations.length > 0) { for (var dailyOperation of dailyOperations) { var _do = dailyOperation._id; Object.assign(dailyOperation, _do); } } dailyOperations = this.cleanUp(dailyOperations); Object.assign(data, { dailyOperations: dailyOperations }); var jobsGetQC = [] var filters = ["orderNo", "colorCode", "kanbanCode"]; var _dailyOperations = this.removeDuplicates(dailyOperations, filters); for (var dailyOperation of _dailyOperations) { jobsGetQC.push(this.fabricQualityControlCollection.aggregate([ { $unwind: "$fabricGradeTests" }, { $match: { "_deleted": false, "productionOrderNo": dailyOperation.orderNo, // "kanbanCode": dailyOperation.kanbanCode } }, { $group: { "_id": "$fabricGradeTests.grade", "productionOrderNo": { "$first": "$productionOrderNo" }, "qty": { "$sum": "$fabricGradeTests.initLength" }, } }, { $sort: { "_id": 1 } } ]).toArray()); } if (jobsGetQC.length == 0) { jobsGetQC.push(Promise.resolve(null)) } Promise.all(jobsGetQC).then(qualityControls => { qualityControls = [].concat.apply([], qualityControls); qualityControls = this.cleanUp(qualityControls); Object.assign(data, { qualityControls: qualityControls }); resolve(data); }) }) }) }); } removeDuplicates(arr, filters) { var new_arr = []; var lookup = {}; for (var i in arr) { var attr = ""; for (var n in filters) { attr += arr[i][filters[n]]; } if (!lookup[attr]) { lookup[attr] = arr[i]; } } for (i in lookup) { new_arr.push(lookup[i]); } return new_arr; } cleanUp(input) { var newArr = []; for (var i = 0; i < input.length; i++) { if (input[i]) { newArr.push(input[i]); } } return newArr; } }
src/managers/sales/production-order-manager.js
'use strict' var ObjectId = require("mongodb").ObjectId; require("mongodb-toolkit"); var DLModels = require('dl-models'); var map = DLModels.map; var ProductionOrder = DLModels.sales.ProductionOrder; var ProductionOrderDetail = DLModels.sales.ProductionOrderDetail; var ProductionOrderLampStandard = DLModels.sales.ProductionOrderLampStandard; var LampStandardManager = require('../master/lamp-standard-manager'); var BuyerManager = require('../master/buyer-manager'); var UomManager = require('../master/uom-manager'); var ProductManager = require('../master/product-manager'); var ProcessTypeManager = require('../master/process-type-manager'); var OrderTypeManager = require('../master/order-type-manager'); var ColorTypeManager = require('../master/color-type-manager'); var FinishTypeManager = require('../master/finish-type-manager'); var StandardTestManager = require('../master/standard-test-manager'); var MaterialConstructionManager = require('../master/material-construction-manager'); var YarnMaterialManager = require('../master/yarn-material-manager'); var AccountManager = require('../auth/account-manager'); var BaseManager = require('module-toolkit').BaseManager; var i18n = require('dl-i18n'); var generateCode = require("../../utils/code-generator"); var assert = require('assert'); module.exports = class ProductionOrderManager extends BaseManager { constructor(db, user) { super(db, user); this.collection = this.db.collection(map.sales.collection.ProductionOrder); this.dailyOperationCollection = this.db.collection(map.production.finishingPrinting.collection.DailyOperation); this.fabricQualityControlCollection = this.db.use(map.production.finishingPrinting.qualityControl.defect.collection.FabricQualityControl); this.LampStandardManager = new LampStandardManager(db, user); this.BuyerManager = new BuyerManager(db, user); this.UomManager = new UomManager(db, user); this.ProductManager = new ProductManager(db, user); this.ProcessTypeManager = new ProcessTypeManager(db, 
user); this.ColorTypeManager = new ColorTypeManager(db, user); this.OrderTypeManager = new OrderTypeManager(db, user); this.MaterialConstructionManager = new MaterialConstructionManager(db, user); this.YarnMaterialManager = new YarnMaterialManager(db, user); this.FinishTypeManager = new FinishTypeManager(db, user); this.StandardTestManager = new StandardTestManager(db, user); this.AccountManager = new AccountManager(db, user); } _getQuery(paging) { var deletedFilter = { _deleted: false }, keywordFilter = {}; var query = {}; if (paging.keyword) { var regex = new RegExp(paging.keyword, "i"); var filterSalesContract = { 'salesContractNo': { '$regex': regex } }; var filterOrderNo = { 'orderNo': { '$regex': regex } }; var filterBuyerName = { 'buyer.name': { '$regex': regex } }; var filterBuyerType = { 'buyer.type': { '$regex': regex } }; var filterProcessType = { 'processType.name': { '$regex': regex } }; keywordFilter = { '$or': [filterSalesContract, filterOrderNo, filterBuyerName, filterBuyerType, filterProcessType] }; } query = { '$and': [deletedFilter, paging.filter, keywordFilter] } return query; } _beforeInsert(productionOrder) { productionOrder.orderNo = productionOrder.orderNo === "" ? generateCode() : productionOrder.orderNo; productionOrder._createdDate = new Date(); return Promise.resolve(productionOrder); } _validate(productionOrder) { var errors = {}; var valid = productionOrder; var getProductionOrder = this.collection.singleOrDefault({ _id: { '$ne': new ObjectId(valid._id) }, orderNo: valid.orderNo }); var getBuyer = ObjectId.isValid(valid.buyerId) ? this.BuyerManager.getSingleByIdOrDefault(valid.buyerId) : Promise.resolve(null); var getUom = valid.uom && ObjectId.isValid(valid.uomId) ? this.UomManager.getSingleByIdOrDefault(valid.uomId) : Promise.resolve(null); var getProduct = ObjectId.isValid(valid.materialId) ? 
this.ProductManager.getSingleByIdOrDefault(valid.materialId) : Promise.resolve(null); var getProcessType = ObjectId.isValid(valid.processTypeId) ? this.ProcessTypeManager.getSingleByIdOrDefault(valid.processTypeId) : Promise.resolve(null); var getOrderType = ObjectId.isValid(valid.orderTypeId) ? this.OrderTypeManager.getSingleByIdOrDefault(valid.orderTypeId) : Promise.resolve(null); var getFinishType = ObjectId.isValid(valid.finishTypeId) ? this.FinishTypeManager.getSingleByIdOrDefault(valid.finishTypeId) : Promise.resolve(null); var getYarnMaterial = ObjectId.isValid(valid.yarnMaterialId) ? this.YarnMaterialManager.getSingleByIdOrDefault(valid.yarnMaterialId) : Promise.resolve(null); var getStandardTest = ObjectId.isValid(valid.standardTestId) ? this.StandardTestManager.getSingleByIdOrDefault(valid.standardTestId) : Promise.resolve(null); var getMaterialConstruction = ObjectId.isValid(valid.materialConstructionId) ? this.MaterialConstructionManager.getSingleByIdOrDefault(valid.materialConstructionId) : Promise.resolve(null); var getAccount = ObjectId.isValid(valid.accountId) ? this.AccountManager.getSingleByIdOrDefault(valid.accountId) : Promise.resolve(null); valid.details = valid.details || []; var getColorTypes = []; for (var detail of valid.details) { if (ObjectId.isValid(detail.colorTypeId)) { var color = ObjectId.isValid(detail.colorTypeId) ? this.ColorTypeManager.getSingleByIdOrDefault(detail.colorTypeId) : Promise.resolve(null); getColorTypes.push(color); } } valid.lampStandards = valid.lampStandards || []; var getLampStandards = []; for (var lamp of valid.lampStandards) { if (ObjectId.isValid(lamp.lampStandardId)) { var lamps = ObjectId.isValid(lamp.lampStandardId) ? 
this.LampStandardManager.getSingleByIdOrDefault(lamp.lampStandardId) : Promise.resolve(null); getLampStandards.push(lamps); } } return Promise.all([getProductionOrder, getBuyer, getUom, getProduct, getProcessType, getOrderType, getFinishType, getYarnMaterial, getStandardTest, getMaterialConstruction, getAccount].concat(getColorTypes, getLampStandards)) .then(results => { var _productionOrder = results[0]; var _buyer = results[1]; var _uom = results[2]; var _material = results[3]; var _process = results[4]; var _order = results[5]; var _finish = results[6]; var _yarn = results[7]; var _standard = results[8]; var _construction = results[9]; var _account = results[10]; var _colors = results.slice(11, 11 + getColorTypes.length); var _lampStandards = results.slice(11 + getColorTypes.length, results.length); if (_productionOrder) { errors["orderNo"] = i18n.__("ProductionOrder.orderNo.isExist:%s is Exist", i18n.__("Product.orderNo._:orderNo")); //"orderNo sudah ada"; } if (valid.uom) { if (!_uom) errors["uom"] = i18n.__("ProductionOrder.uom.isRequired:%s is required", i18n.__("Product.uom._:Uom")); //"Satuan tidak boleh kosong"; } else errors["uom"] = i18n.__("ProductionOrder.uom.isRequired:%s is required", i18n.__("Product.uom._:Uom")); //"Satuan tidak boleh kosong"; if (!valid.salesContractNo || valid.salesContractNo === '') { errors["salesContractNo"] = i18n.__("ProductionOrder.salesContractNo.isRequired:%s is required", i18n.__("ProductionOrder.salesContractNo._:SalesContractNo")); //"salesContractNo tidak boleh kosong"; } if (!_material) errors["material"] = i18n.__("ProductionOrder.material.isRequired:%s is not exists", i18n.__("ProductionOrder.material._:Material")); //"material tidak boleh kosong"; if (!_process) errors["processType"] = i18n.__("ProductionOrder.processType.isRequired:%s is not exists", i18n.__("ProductionOrder.processType._:ProcessType")); //"processType tidak boleh kosong"; if (!_order) errors["orderType"] = 
i18n.__("ProductionOrder.orderType.isRequired:%s is not exists", i18n.__("ProductionOrder.orderType._:OrderType")); //"orderType tidak boleh kosong"; if (!_yarn) errors["yarnMaterial"] = i18n.__("ProductionOrder.yarnMaterial.isRequired:%s is not exists", i18n.__("ProductionOrder.yarnMaterial._:YarnMaterial")); //"yarnMaterial tidak boleh kosong"; if (!_construction) errors["materialConstruction"] = i18n.__("ProductionOrder.materialConstruction.isRequired:%s is not exists", i18n.__("ProductionOrder.materialConstruction._:MaterialConstruction")); //"materialConstruction tidak boleh kosong"; if (!_finish) errors["finishType"] = i18n.__("ProductionOrder.finishType.isRequired:%s is not exists", i18n.__("ProductionOrder.finishType._:FinishType")); //"finishType tidak boleh kosong"; if (!_standard) errors["standardTest"] = i18n.__("ProductionOrder.standardTest.isRequired:%s is not exists", i18n.__("ProductionOrder.standardTest._:StandardTest")); //"standardTest tidak boleh kosong"; if (!_account) { errors["account"] = i18n.__("ProductionOrder.account.isRequired:%s is not exists", i18n.__("ProductionOrder.account._:Account")); //"account tidak boleh kosong"; } if (!valid.packingInstruction || valid.packingInstruction === '') { errors["packingInstruction"] = i18n.__("ProductionOrder.packingInstruction.isRequired:%s is required", i18n.__("ProductionOrder.packingInstruction._:PackingInstruction")); //"PackingInstruction tidak boleh kosong"; } if (!valid.materialOrigin || valid.materialOrigin === '') { errors["materialOrigin"] = i18n.__("ProductionOrder.materialOrigin.isRequired:%s is required", i18n.__("ProductionOrder.materialOrigin._:MaterialOrigin")); //"materialOrigin tidak boleh kosong"; } if (!valid.finishWidth || valid.finishWidth === '') { errors["finishWidth"] = i18n.__("ProductionOrder.finishWidth.isRequired:%s is required", i18n.__("ProductionOrder.finishWidth._:FinishWidth")); //"finishWidth tidak boleh kosong"; } if (!valid.sample || valid.sample === '') { 
errors["sample"] = i18n.__("ProductionOrder.sample.isRequired:%s is required", i18n.__("ProductionOrder.sample._:Sample")); //"sample tidak boleh kosong"; } if (!valid.handlingStandard || valid.handlingStandard === '') { errors["handlingStandard"] = i18n.__("ProductionOrder.handlingStandard.isRequired:%s is required", i18n.__("ProductionOrder.handlingStandard._:HandlingStandard")); //"handlingStandard tidak boleh kosong"; } if (!valid.shrinkageStandard || valid.shrinkageStandard === '') { errors["shrinkageStandard"] = i18n.__("ProductionOrder.shrinkageStandard.isRequired:%s is required", i18n.__("ProductionOrder.shrinkageStandard._:ShrinkageStandard")); //"shrinkageStandard tidak boleh kosong"; } if (!valid.deliveryDate || valid.deliveryDate === "") { errors["deliveryDate"] = i18n.__("ProductionOrder.deliveryDate.isRequired:%s is required", i18n.__("ProductionOrder.deliveryDate._:deliveryDate")); //"deliveryDate tidak boleh kosong"; } // else{ // valid.deliveryDate=new Date(valid.deliveryDate); // var today=new Date(); // today.setHours(0,0,0,0); // if(today>valid.deliveryDate){ // errors["deliveryDate"] = i18n.__("ProductionOrder.deliveryDate.shouldNot:%s should not be less than today's date", i18n.__("ProductionOrder.deliveryDate._:deliveryDate")); //"deliveryDate tidak boleh kurang dari tanggal hari ini"; // } // } if (_order) { if (_order.name.trim().toLowerCase() == "printing") { if (!valid.RUN || valid.RUN == "") { errors["RUN"] = i18n.__("ProductionOrder.RUN.isRequired:%s is required", i18n.__("ProductionOrder.RUN._:RUN")); //"RUN tidak boleh kosong"; } if (valid.RUN && valid.RUN != "Tanpa RUN") { if (!valid.RUNWidth || valid.RUNWidth.length <= 0) { errors["RUNWidth"] = i18n.__("ProductionOrder.RUNWidth.isRequired:%s is required", i18n.__("ProductionOrder.RUNWidth._:RUNWidth")); //"RUNWidth tidak boleh kosong"; } if (valid.RUNWidth.length > 0) { for (var r = 0; r < valid.RUNWidth.length; r++) { if (valid.RUNWidth[r] <= 0) { errors["RUNWidth"] = 
i18n.__("ProductionOrder.RUNWidth.shouldNot:%s should not be less than or equal zero", i18n.__("ProductionOrder.RUNWidth._:RUNWidth")); //"RUNWidth tidak boleh nol"; break; } } } } if (!valid.designNumber || valid.designNumber == "") { errors["designNumber"] = i18n.__("ProductionOrder.designNumber.isRequired:%s is required", i18n.__("ProductionOrder.designNumber._:DesignNumber")); //"designNumber tidak boleh kosong"; } if (!valid.designCode || valid.designCode == "") { errors["designCode"] = i18n.__("ProductionOrder.designCode.isRequired:%s is required", i18n.__("ProductionOrder.designCode._:DesignCode")); //"designCode tidak boleh kosong"; } } } if (!_buyer) errors["buyer"] = i18n.__("ProductionOrder.buyer.isRequired:%s is not exists", i18n.__("ProductionOrder.buyer._:Buyer")); //"Buyer tidak boleh kosong"; if (!valid.orderQuantity || valid.orderQuantity === 0) errors["orderQuantity"] = i18n.__("ProductionOrder.orderQuantity.isRequired:%s is required", i18n.__("ProductionOrder.orderQuantity._:OrderQuantity")); //"orderQuantity tidak boleh kosong"; else { var totalqty = 0; if (valid.details.length > 0) { for (var i of valid.details) { totalqty += i.quantity; } } if (valid.orderQuantity != totalqty) { errors["orderQuantity"] = i18n.__("ProductionOrder.orderQuantity.shouldNot:%s should equal SUM quantity in details", i18n.__("ProductionOrder.orderQuantity._:OrderQuantity")); //"orderQuantity tidak boleh berbeda dari total jumlah detail"; } } if (!valid.shippingQuantityTolerance || valid.shippingQuantityTolerance === 0) errors["shippingQuantityTolerance"] = i18n.__("ProductionOrder.shippingQuantityTolerance.isRequired:%s is required", i18n.__("ProductionOrder.shippingQuantityTolerance._:ShippingQuantityTolerance")); //"shippingQuantityTolerance tidak boleh kosong"; else if (valid.shippingQuantityTolerance > 100) { errors["shippingQuantityTolerance"] = i18n.__("ProductionOrder.shippingQuantityTolerance.shouldNot:%s should not more than 100", 
i18n.__("ProductionOrder.shippingQuantityTolerance._:ShippingQuantityTolerance")); //"shippingQuantityTolerance tidak boleh lebih dari 100"; } if (!valid.materialWidth || valid.materialWidth === "") errors["materialWidth"] = i18n.__("ProductionOrder.materialWidth.isRequired:%s is required", i18n.__("ProductionOrder.materialWidth._:MaterialWidth")); //"materialWidth tidak boleh kosong"; valid.lampStandards = valid.lampStandards || []; if (valid.lampStandards && valid.lampStandards.length <= 0) { errors["lampStandards"] = i18n.__("ProductionOrder.lampStandards.isRequired:%s is required", i18n.__("ProductionOrder.lampStandards._:LampStandards")); //"Harus ada minimal 1 lampStandard"; } else if (valid.lampStandards.length > 0) { var lampErrors = []; for (var lamp of valid.lampStandards) { var lampError = {}; if (!_lampStandards || _lampStandards.length <= 0) { lampError["lampStandards"] = i18n.__("ProductionOrder.lampStandards.lampStandard.isRequired:%s is not exists", i18n.__("ProductionOrder.lampStandards.lampStandard._:LampStandard")); //"lampStandard tidak boleh kosong"; } if (!lamp.lampStandard._id) { lampError["lampStandards"] = i18n.__("ProductionOrder.lampStandards.lampStandard.isRequired:%s is not exists", i18n.__("ProductionOrder.lampStandards.lampStandard._:LampStandard")); //"lampStandard tidak boleh kosong"; } if (Object.getOwnPropertyNames(lampError).length > 0) lampErrors.push(lampError); } if (lampErrors.length > 0) errors.lampStandards = lampErrors; } valid.details = valid.details || []; if (valid.details && valid.details.length <= 0) { errors["details"] = i18n.__("ProductionOrder.details.isRequired:%s is required", i18n.__("ProductionOrder.details._:Details")); //"Harus ada minimal 1 detail"; } else if (valid.details.length > 0) { var detailErrors = []; var totalqty = 0; for (var i of valid.details) { totalqty += i.quantity; } for (var detail of valid.details) { var detailError = {}; detail.code = generateCode(); if (!detail.colorRequest || 
detail.colorRequest == "") detailError["colorRequest"] = i18n.__("ProductionOrder.details.colorRequest.isRequired:%s is required", i18n.__("ProductionOrder.details.colorRequest._:ColorRequest")); //"colorRequest tidak boleh kosong"; if (detail.quantity <= 0) detailError["quantity"] = i18n.__("ProductionOrder.details.quantity.isRequired:%s is required", i18n.__("ProductionOrder.details.quantity._:Quantity")); //Jumlah barang tidak boleh kosong"; if (valid.orderQuantity != totalqty) detailError["total"] = i18n.__("ProductionOrder.details.quantity.shouldNot:%s Total should equal Order Quantity", i18n.__("ProductionOrder.details.quantity._:Quantity")); //Jumlah barang tidak boleh berbeda dari jumlah order"; if (!_uom) detailError["uom"] = i18n.__("ProductionOrder.details.uom.isRequired:%s is not exists", i18n.__("ProductionOrder.details.uom._:Uom")); //"satuan tidak boleh kosong"; if (_uom) { detail.uomId = new ObjectId(_uom._id); } if (!detail.colorTemplate || detail.colorTemplate == "") detailError["colorTemplate"] = i18n.__("ProductionOrder.details.colorTemplate.isRequired:%s is required", i18n.__("ProductionOrder.details.colorTemplate._:ColorTemplate")); //"colorTemplate tidak boleh kosong"; } if (_order) { if (_order.name.toLowerCase() == "yarn dyed" || _order.name.toLowerCase() == "printing") { _colors = {}; } else { if (!_colors) detailError["colorType"] = i18n.__("ProductionOrder.details.colorType.isRequired:%s is required", i18n.__("ProductionOrder.details.colorType._:ColorType")); //"colorType tidak boleh kosong"; else if (!detail.colorType) { detailError["colorType"] = i18n.__("ProductionOrder.details.colorType.isRequired:%s is required", i18n.__("ProductionOrder.details.colorType._:ColorType")); //"colorType tidak boleh kosong"; } } if (Object.getOwnPropertyNames(detailError).length > 0) detailErrors.push(detailError); } if (detailErrors.length > 0) errors.details = detailErrors; } if (!valid.orderNo || valid.orderNo === '') { valid.orderNo = 
generateCode(); } if (_buyer) { valid.buyerId = new ObjectId(_buyer._id); } if (_uom) { valid.uomId = new ObjectId(_uom._id); } if (_process) { valid.processTypeId = new ObjectId(_process._id); } if (_account) { valid.accountId = new ObjectId(_account._id); } if (valid.lampStandards.length > 0) { for (var lamp of valid.lampStandards) { for (var _lampStandard of _lampStandards) { if (_lampStandard) { if (lamp.lampStandardId.toString() === _lampStandard._id.toString()) { lamp.lampStandardId = _lampStandard._id; lamp.lampStandard = _lampStandard; } } } } } if (_order) { valid.orderTypeId = new ObjectId(_order._id); if (_order.name.toLowerCase() != "printing") { valid.RUN = ""; valid.RUNWidth = []; valid.designCode = ""; valid.designNumber = ""; valid.articleFabricEdge = ""; } if (_order.name.toLowerCase() == "yarn dyed" || _order.name.toLowerCase() == "printing") { for (var detail of valid.details) { detail.colorTypeId = null; detail.colorType = null; } } else { for (var detail of valid.details) { if (detail.colorType) { for (var _color of _colors) { if (_color) { if (detail.colorTypeId.toString() === _color._id.toString()) { detail.colorTypeId = _color._id; detail.colorType = _color; } } } } } } } if (_material) { valid.material = _material; valid.materialId = new ObjectId(_material._id); } if (_finish) { valid.finishType = _finish; valid.finishTypeId = new ObjectId(_finish._id); } if (_yarn) { valid.yarnMaterial = _yarn; valid.yarnMaterialId = new ObjectId(_yarn._id); } if (_standard) { valid.standardTest = _standard; valid.standardTestId = _standard._id; } if (_construction) { valid.materialConstruction = _construction; valid.materialConstructionId = _construction._id; } valid.deliveryDate = new Date(valid.deliveryDate); if (Object.getOwnPropertyNames(errors).length > 0) { var ValidationError = require('module-toolkit').ValidationError; return Promise.reject(new ValidationError('data does not pass validation', errors)); } if (!valid.stamp) { valid = new 
ProductionOrder(valid); } valid.stamp(this.user.username, "manager"); return Promise.resolve(valid); }); } _createIndexes() { var dateIndex = { name: `ix_${map.sales.collection.ProductionOrder}__updatedDate`, key: { _updatedDate: -1 } } var noIndex = { name: `ix_${map.sales.collection.ProductionOrder}_orderNo`, key: { orderNo: 1 }, unique: true } return this.collection.createIndexes([dateIndex, noIndex]); } pdf(id) { return new Promise((resolve, reject) => { this.getSingleById(id) .then(productionOrder => { var getDefinition = require("../../pdf/definitions/production-order"); var definition = getDefinition(productionOrder); var generatePdf = require("../../pdf/pdf-generator"); generatePdf(definition) .then(binary => { resolve(binary); }) .catch(e => { reject(e); }); }) .catch(e => { reject(e); }); }); } getSingleProductionOrderDetail(detailCode) { return new Promise((resolve, reject) => { var query = { "details": { "$elemMatch": { "code": detailCode } } }; this.collection.singleOrDefault(query).then((result) => { var dataReturn = {}; if (result) { for (var detail of result.details) { if (detailCode === detail.code) dataReturn = new ProductionOrderDetail(detail); } } resolve(dataReturn); }); }); } getReport(query) { return new Promise((resolve, reject) => { if(!query.size){ query.size=20; } if(!query.page){ query.page=1; } var _page = parseInt(query.page); var _size = parseInt(query.size); var qry = Object.assign({}); var filter = query.filter || {}; if (filter.salesContractNo) { Object.assign(qry, { "salesContractNo": { "$regex": (new RegExp(filter.salesContractNo, "i")) } }); } if (filter.orderNo) { Object.assign(qry, { "orderNo": { "$regex": (new RegExp(filter.orderNo, "i")) } }); } if (filter.orderTypeId) { Object.assign(qry, { "orderTypeId": (new ObjectId(filter.orderTypeId)) }); } if (filter.processTypeId) { Object.assign(qry, { "processTypeId": (new ObjectId(filter.processTypeId)) }); } if (filter.buyerId) { Object.assign(qry, { "buyerId": (new 
ObjectId(filter.buyerId)) }); } if (filter.accountId) { Object.assign(qry, { "accountId": (new ObjectId(filter.accountId)) }); } if (filter.sdate && filter.edate) { Object.assign(qry, { "_createdDate": { "$gte": new Date(`${filter.sdate} 00:00:00`), "$lte": new Date(`${filter.edate} 23:59:59`) } }); } qry = Object.assign(qry, { _deleted: false }); var getPrdOrder = []; getPrdOrder.push(this.collection .aggregate([ { $unwind: "$details" }, { $match: qry }, { $group: { _id: null, count: { $sum: 1 } } } ]) .toArray()); if ((query.accept || '').toString().indexOf("application/xls") < 0) { getPrdOrder.push(this.collection .aggregate([ { $unwind: "$details" }, { $match: qry }, { $project: { "salesContractNo": 1, "_createdDate": 1, "orderNo": 1, "orderType": "$orderType.name", "processType": "$processType.name", "buyer": "$buyer.name", "buyerType": "$buyer.type", "orderQuantity": "$orderQuantity", "uom": "$uom.unit", "colorCode": "$details.code", "colorTemplate": "$details.colorTemplate", "colorRequest": "$details.colorRequest", "colorType": "$details.colorType.name", "quantity": "$details.quantity", "uomDetail": "$details.uom.unit", "deliveryDate": "$deliveryDate", "firstname": "$account.profile.firstname", "lastname": "$account.profile.lastname" } }, { $sort: { "_createdDate": -1 } }, { $skip: ((_page - 1) * _size) }, { $limit: (_size) } ]) .toArray()); } else { getPrdOrder.push(this.collection .aggregate([ { $unwind: "$details" }, { $match: qry }, { $project: { "salesContractNo": 1, "_createdDate": 1, "orderNo": 1, "orderType": "$orderType.name", "processType": "$processType.name", "buyer": "$buyer.name", "buyerType": "$buyer.type", "orderQuantity": "$orderQuantity", "uom": "$uom.unit", "colorCode": "$details.code", "colorTemplate": "$details.colorTemplate", "colorRequest": "$details.colorRequest", "colorType": "$details.colorType.name", "quantity": "$details.quantity", "uomDetail": "$details.uom.unit", "deliveryDate": "$deliveryDate", "firstname": 
"$account.profile.firstname", "lastname": "$account.profile.lastname" } }, { $sort: { "_createdDate": -1 } } ]) .toArray()); } Promise.all(getPrdOrder).then(result => { var resCount = result[0]; var count = resCount.length > 0 ? resCount[0].count : 0; var prodOrders = result[1]; prodOrders = [].concat.apply([], prodOrders); var jobsGetDailyOperation = []; for (var prodOrder of prodOrders) { jobsGetDailyOperation.push(this.dailyOperationCollection.aggregate([ { $match: { "type": "input", "_deleted": false, "kanban.selectedProductionOrderDetail.code": prodOrder.colorCode, "kanban.productionOrder.orderNo": prodOrder.orderNo } }, { $project: { "orderNo": "$kanban.productionOrder.orderNo", "kanbanCode": "$kanban.code", "colorCode": "$kanban.selectedProductionOrderDetail.code", "input": 1 } } ]).toArray()); } if (jobsGetDailyOperation.length == 0) { jobsGetDailyOperation.push(Promise.resolve(null)) } Promise.all(jobsGetDailyOperation).then(dailyOperations => {//Get DailyOperation dailyOperations = [].concat.apply([], dailyOperations); dailyOperations = this.cleanUp(dailyOperations); var jobsGetQC = []; for (var prodOrder of prodOrders) { var _dailyOperations = dailyOperations.filter(function (dailyOperation) { return dailyOperation.orderNo === prodOrder.orderNo && dailyOperation.colorCode === prodOrder.colorCode; }) var filters = ["orderNo", "colorCode", "kanbanCode"]; _dailyOperations = this.removeDuplicates(_dailyOperations, filters); if (_dailyOperations.length > 0) { var kanbanCodes = []; _dailyOperations.some(function (dailyOperation, idx) { kanbanCodes.push(dailyOperation.kanbanCode); }); var sum = _dailyOperations .map(dailyOperation => dailyOperation.input) .reduce(function (prev, curr, index, arr) { return prev + curr; }, 0); for (var dailyOperation of _dailyOperations) { jobsGetQC.push(this.fabricQualityControlCollection.aggregate([ { $match: { "_deleted": false, "productionOrderNo": dailyOperation.orderNo, "kanbanCode": dailyOperation.kanbanCode } }, { 
$project: { "productionOrderNo": 1, "kanbanCode": 1, "orderQuantityQC": { $sum: "$fabricGradeTests.initLength" } } } ]).toArray()); } prodOrder.input = sum; prodOrder.kanbanCodes = kanbanCodes; } else { prodOrder.input = 0; prodOrder.kanbanCodes = []; } } if (jobsGetQC.length == 0) { jobsGetQC.push(Promise.resolve(null)) } Promise.all(jobsGetQC).then(qualityControls => {//Get QC qualityControls = [].concat.apply([], qualityControls); qualityControls = this.cleanUp(qualityControls); for (var prodOrder of prodOrders) { var _qualityControls = qualityControls.filter(function (qualityControl) { return qualityControl.productionOrderNo === prodOrder.orderNo && prodOrder.kanbanCodes.includes(qualityControl.kanbanCode); }) // filters = ["productionOrderNo", "kanbanCode"]; // _qualityControls = this.removeDuplicates(_qualityControls, filters); var _orderQuantityQC = 0 if (_qualityControls.length > 0) { _orderQuantityQC = _qualityControls .map(qualityControl => qualityControl.orderQuantityQC) .reduce(function (prev, curr, index, arr) { return prev + curr; }, 0); } prodOrder.orderQuantityQC = _orderQuantityQC; if (prodOrder.orderQuantityQC > 0) { prodOrder.status = "Sudah dalam pemeriksaan kain"; } else if (prodOrder.input > 0) { prodOrder.status = "Sudah dalam produksi"; } else if (prodOrder.input == 0) { prodOrder.status = "Belum dalam produksi"; } prodOrder.detail = `${prodOrder.quantity} di spp\n${prodOrder.input} di produksi\n${prodOrder.orderQuantityQC} di pemeriksaan`; } var results = { data: prodOrders, count: prodOrders.length, size: 20, total: count, page: (_page * _size) / _size }; resolve(results); }) }) }) }); } getDetailReport(salesContractNo) { return new Promise((resolve, reject) => { var qry = Object.assign({}); var data = {} if (salesContractNo) { Object.assign(qry, { "salesContractNo": { "$regex": (new RegExp(salesContractNo, "i")) } }); } qry = Object.assign(qry, { _deleted: false }); this.collection .aggregate([ { $unwind: "$details" }, { $match: qry }, { 
$group: { "_id": "$orderNo", "salesContractNo": { "$first": "$salesContractNo" }, "orderQuantity": { "$first": "$orderQuantity" }, "uom": { "$first": "$uom.unit" }, "details": { "$push": { "colorTemplate": "$details.colorTemplate", "colorCode": "$details.code", "colorRequest": "$details.colorRequest", "colorType": "$details.colorType.name", "quantity": "$details.quantity", "uomDetail": "$details.uom.unit", } } } }, { $sort: { "_createdDate": -1 } } ]) .toArray().then(prodOrders => { prodOrders = [].concat.apply([], prodOrders); Object.assign(data, { productionOrders: prodOrders }); var _prodOrders = prodOrders.map((prodOrder) => { return prodOrder.details.map((detail) => { return { salesContractNo: prodOrder.salesContractNo, orderNo: prodOrder._id, colorCode: detail.colorCode } }) }) _prodOrders = [].concat.apply([], _prodOrders); var filters = ["orderNo"]; _prodOrders = this.removeDuplicates(_prodOrders, filters); var jobsGetDailyOperation = []; for (var prodOrder of _prodOrders) { jobsGetDailyOperation.push(this.dailyOperationCollection.aggregate([ { $unwind: "$kanban.instruction.steps" }, { $match: { "type": "input", "_deleted": false, // "kanban.selectedProductionOrderDetail.code": prodOrder.colorCode, "kanban.productionOrder.orderNo": prodOrder.orderNo } }, { $project: { "orderNo": "$kanban.productionOrder.orderNo", "kanbanCode": "$kanban.code", "machine": "$machine.name", "step": "$kanban.instruction.steps.process", "cmp": { "$eq": ["$stepId", "$kanban.instruction.steps._id"] }, "qty": "$input" } }, { $match: { "cmp": true } } ]).toArray()); } if (jobsGetDailyOperation.length == 0) { jobsGetDailyOperation.push(Promise.resolve(null)) } Promise.all(jobsGetDailyOperation).then(dailyOperations => { dailyOperations = [].concat.apply([], dailyOperations); dailyOperations = this.cleanUp(dailyOperations); Object.assign(data, { dailyOperations: dailyOperations }); var jobsGetQC = [] var filters = ["orderNo", "colorCode", "kanbanCode"]; var _dailyOperations = 
this.removeDuplicates(dailyOperations, filters); for (var dailyOperation of _dailyOperations) { jobsGetQC.push(this.fabricQualityControlCollection.aggregate([ { $unwind: "$fabricGradeTests" }, { $match: { "_deleted": false, "productionOrderNo": dailyOperation.orderNo } }, { $group: { "_id": "$fabricGradeTests.grade", "productionOrderNo": { "$first": "$productionOrderNo" }, "qty": { "$sum": "$fabricGradeTests.initLength" }, } }, { $sort: { "_id": 1 } } ]).toArray()); } if (jobsGetQC.length == 0) { jobsGetQC.push(Promise.resolve(null)) } Promise.all(jobsGetQC).then(qualityControls => { qualityControls = [].concat.apply([], qualityControls); qualityControls = this.cleanUp(qualityControls); Object.assign(data, { qualityControls: qualityControls }); resolve(data); }) }) }) }); } removeDuplicates(arr, filters) { var new_arr = []; var lookup = {}; for (var i in arr) { var attr = ""; for (var n in filters) { attr += arr[i][filters[n]]; } if (!lookup[attr]) { lookup[attr] = arr[i]; } } for (i in lookup) { new_arr.push(lookup[i]); } return new_arr; } cleanUp(input) { var newArr = []; for (var i = 0; i < input.length; i++) { if (input[i]) { newArr.push(input[i]); } } return newArr; } }
update query
src/managers/sales/production-order-manager.js
update query
<ide><path>rc/managers/sales/production-order-manager.js <ide> <ide> getReport(query) { <ide> return new Promise((resolve, reject) => { <del> if(!query.size){ <del> query.size=20; <del> } <del> if(!query.page){ <del> query.page=1; <add> if (!query.size) { <add> query.size = 20; <add> } <add> if (!query.page) { <add> query.page = 1; <ide> } <ide> var _page = parseInt(query.page); <ide> var _size = parseInt(query.size); <ide> var jobsGetDailyOperation = []; <ide> for (var prodOrder of _prodOrders) { <ide> jobsGetDailyOperation.push(this.dailyOperationCollection.aggregate([ <del> { <add> { <ide> $unwind: "$kanban.instruction.steps" <ide> }, <ide> { <ide> "orderNo": "$kanban.productionOrder.orderNo", <ide> "kanbanCode": "$kanban.code", <ide> "machine": "$machine.name", <add> "color": "$kanban.selectedProductionOrderDetail.colorRequest", <ide> "step": "$kanban.instruction.steps.process", <ide> "cmp": { "$eq": ["$stepId", "$kanban.instruction.steps._id"] }, <ide> "qty": "$input" <ide> }, <ide> { <ide> $match: { "cmp": true } <add> }, { <add> $group: <add> { <add> "_id": { <add> "orderNo": "$orderNo", <add> "machine": "$machine", <add> "step": "$step", <add> "color": "$color" <add> }, <add> "kanbanCode": { $first: "$kanbanCode" }, <add> "qty": { "$sum": "$qty" } <add> } <add> }, <add> { <add> $sort:{"_createdDate":1} <ide> } <ide> ]).toArray()); <ide> } <ide> } <ide> Promise.all(jobsGetDailyOperation).then(dailyOperations => { <ide> dailyOperations = [].concat.apply([], dailyOperations); <add> if (dailyOperations.length > 0) { <add> for (var dailyOperation of dailyOperations) { <add> var _do = dailyOperation._id; <add> Object.assign(dailyOperation, _do); <add> } <add> } <ide> dailyOperations = this.cleanUp(dailyOperations); <ide> Object.assign(data, { dailyOperations: dailyOperations }); <ide> var jobsGetQC = [] <ide> { <ide> $match: { <ide> "_deleted": false, <del> "productionOrderNo": dailyOperation.orderNo <add> "productionOrderNo": dailyOperation.orderNo, <add> // 
"kanbanCode": dailyOperation.kanbanCode <ide> } <ide> }, { <ide> $group:
Java
mit
65b0c816ef6a9aecdca11f66b376eb2fa39eed3e
0
sweetcode/SweetDB
package de.SweetCode.SweetDB.Table; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonObject; import de.SweetCode.SweetDB.DataSet.DataSet; import de.SweetCode.SweetDB.DataSet.Field; import de.SweetCode.SweetDB.DataType.DataType; import de.SweetCode.SweetDB.DataType.DataTypes; import de.SweetCode.SweetDB.SweetDB; import de.SweetCode.SweetDB.Table.Action.InsertAction; import de.SweetCode.SweetDB.Table.Syntax.Syntax; import de.SweetCode.SweetDB.Table.Syntax.SyntaxRule; import de.SweetCode.SweetDB.Table.Syntax.SyntaxRuleBuilder; import org.apache.commons.io.FileUtils; import java.io.File; import java.io.IOException; import java.util.*; import java.util.concurrent.*; import java.util.function.Predicate; import java.util.stream.Collectors; /** * Created by Yonas on 29.12.2015. */ public class Table { private SweetDB sweetDB; private ExecutorService executorService = null; private final File path; private final String name; private Syntax syntax = new Syntax(this); private List<DataSet> dataSets = new ArrayList<>(); public Table(SweetDB sweetDB, File path) { this.sweetDB = sweetDB; this.path = path; this.name = path.getName().substring(0, path.getName().indexOf(".")); this.executorService = Executors.newFixedThreadPool(this.sweetDB.getStorageThreads()); } public Table(SweetDB sweetDB, File path, List<SyntaxRule> syntaxRules) { this(sweetDB, path); syntaxRules.forEach(rule -> this.syntax.add(rule)); } public SweetDB getDatabase() { return this.sweetDB; } /** * The path to the file of the table. * @return */ public File getPath() { return this.path; } /** * The name of the table. * @return */ public String getName() { return this.name; } /** * Creates a new InsertAction to insert a new DataSet into the table. * @return */ public InsertAction insert() { return new InsertAction(this.sweetDB, this, this.syntax); } /** * Inserts a DataSet into the table. 
* @param dataSet */ public boolean insert(DataSet dataSet) { if(!(this.syntax.validate(dataSet))) { if(this.sweetDB.isDebugging()) { throw new IllegalArgumentException(String.format( "Invalid insert query.", this.syntax.getAsString() )); } else { return false; } } this.dataSets.add(dataSet); if(this.sweetDB.isAutosave()) { this.store(); } return true; } /** * Finds a list of DataSets in the table. * @param predicate * @return */ public List<DataSet> find(Predicate<? super DataSet> predicate) { return this.dataSets.stream().filter(predicate).collect(Collectors.toCollection(ArrayList::new)); } public Optional<DataSet> findAny(Predicate<? super DataSet> predicate) { return this.dataSets.stream().filter(predicate).findAny(); } public Optional<DataSet> findFirst(Predicate<? super DataSet> predicate) { return this.dataSets.stream().filter(predicate).findFirst(); } /** * Returns a list with all DataSets from the table. * @return */ public List<DataSet> all() { return this.dataSets; } /** * Drops the table. * @return */ public void drop() { if(this.path.exists()) { this.path.delete(); } Iterator<Table> tableIterator = this.sweetDB.getTables().iterator(); while(tableIterator.hasNext()) { if(tableIterator.next().getName().equals(this.getName())) { tableIterator.remove(); break; } } } /** * Parses the data. 
* @param data */ public void parse(String data) { Gson gson = new Gson(); try { JsonObject head = gson.fromJson(data, JsonObject.class); Optional<JsonObject> table = Optional.empty(); if(head.has("table")) { table = Optional.of(head.get("table").getAsJsonObject()); } if(table.get().has("syntax")) { JsonArray syntaxRules = table.get().get("syntax").getAsJsonArray(); syntaxRules.forEach(entry -> { JsonObject syntaxRule = entry.getAsJsonObject(); Optional<DataType> dataType = DataTypes.get(syntaxRule.get("dataType").getAsString()); if(dataType.isPresent()) { this.syntax.add( SyntaxRuleBuilder.create() .fieldName(syntaxRule.get("field").getAsString()) .dataType(dataType.get()) .isUnique(syntaxRule.get("isUnique").getAsBoolean()) .isNullable(syntaxRule.get("isNullable").getAsBoolean()) .isAutoincrement(syntaxRule.get("isAutoincrement").getAsBoolean()) .build() ); } else { throw new IllegalArgumentException(String.format( "\"%s\" is not a valid DataType. Please check the syntax of \"%s\".", syntaxRule.get("dataType").getAsString(), this.getName() )); } }); if(head.has("data") && !(this.syntax.getSyntax().isEmpty())) { JsonArray dataSets = head.get("data").getAsJsonArray(); dataSets.forEach(entry -> { if(!(this.syntax.parseValidation(entry.getAsJsonObject()))) { throw new IllegalArgumentException(String.format( "Invalid syntax in \"%s\" -> %s expected %s. Missing field(s): %s", this.getName(), entry.toString(), this.syntax.getAsString(), String.join(", ", this.syntax.missingFields(entry.getAsJsonObject())) )); } List<Field> fields = entry.getAsJsonObject().entrySet().stream().map(field -> new Field(this.sweetDB, this, field.getKey(), this.syntax.get(field.getKey()).getDataType().parse((field.getValue().isJsonNull() ? null : field.getValue().getAsString())))).collect(Collectors.toList()); this.dataSets.add(new DataSet(fields)); }); } } } catch (Exception exception) { exception.printStackTrace(); //TODO invalid syntax } } /** * Stores the data of the table in the file. 
*/ public void store() { if(this.executorService.isShutdown()) { this.executorService = Executors.newFixedThreadPool(this.sweetDB.getStorageThreads()); } Future<?> task = this.executorService.submit(() -> { JsonObject headData = new JsonObject(); JsonObject tableData = new JsonObject(); JsonArray syntaxData = new JsonArray(); for(Map.Entry<String, SyntaxRule> entry : syntax.getSyntax().entrySet()) { JsonObject syntaxRule = new JsonObject(); syntaxRule.addProperty("field", entry.getKey()); syntaxRule.addProperty("dataType", entry.getValue().getDataType().getName()); syntaxRule.addProperty("isNullable", entry.getValue().isNullable()); syntaxRule.addProperty("isUnique", entry.getValue().isUnique()); syntaxRule.addProperty("isAutoincrement", entry.getValue().isAutoincrement()); syntaxData.add(syntaxRule); } JsonArray data = new JsonArray(); for(DataSet entry : dataSets) { JsonObject dataEntry = new JsonObject(); for(Field field : entry.getFields()) { dataEntry.addProperty(field.getName(), (field.getValue() == null ? null : field.getValue().toString())); } data.add(dataEntry); } tableData.add("syntax", syntaxData); headData.add("table", tableData); headData.add("data", data); try { if(!(path.exists())) { path.createNewFile(); } FileUtils.write(path, headData.toString(), "UTF-8", false); } catch (IOException e) { e.printStackTrace(); //TODO } }); try { task.get(30, TimeUnit.SECONDS); } catch (InterruptedException | ExecutionException e) { e.printStackTrace(); } catch (TimeoutException e) { task.cancel(true); } this.executorService.shutdown(); } }
src/main/java/de/SweetCode/SweetDB/Table/Table.java
package de.SweetCode.SweetDB.Table; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonObject; import de.SweetCode.SweetDB.DataSet.DataSet; import de.SweetCode.SweetDB.DataSet.Field; import de.SweetCode.SweetDB.DataType.DataType; import de.SweetCode.SweetDB.DataType.DataTypes; import de.SweetCode.SweetDB.SweetDB; import de.SweetCode.SweetDB.Table.Action.InsertAction; import de.SweetCode.SweetDB.Table.Syntax.Syntax; import de.SweetCode.SweetDB.Table.Syntax.SyntaxRule; import de.SweetCode.SweetDB.Table.Syntax.SyntaxRuleBuilder; import org.apache.commons.io.FileUtils; import java.io.File; import java.io.IOException; import java.util.*; import java.util.concurrent.*; import java.util.function.Predicate; import java.util.stream.Collectors; /** * Created by Yonas on 29.12.2015. */ public class Table { private SweetDB sweetDB; private ExecutorService executorService = null; private final File path; private final String name; private Syntax syntax = new Syntax(this); private List<DataSet> dataSets = new ArrayList<>(); public Table(SweetDB sweetDB, File path) { this.sweetDB = sweetDB; this.path = path; this.name = path.getName().substring(0, path.getName().indexOf(".")); this.executorService = Executors.newFixedThreadPool(this.sweetDB.getStorageThreads()); } public Table(SweetDB sweetDB, File path, List<SyntaxRule> syntaxRules) { this(sweetDB, path); syntaxRules.forEach(rule -> this.syntax.add(rule)); } public SweetDB getDatabase() { return this.sweetDB; } /** * The path to the file of the table. * @return */ public File getPath() { return this.path; } /** * The name of the table. * @return */ public String getName() { return this.name; } /** * Creates a new InsertAction to insert a new DataSet into the table. * @return */ public InsertAction insert() { return new InsertAction(this.sweetDB, this, this.syntax); } /** * Inserts a DataSet into the table. 
* @param dataSet */ public void insert(DataSet dataSet) { this.dataSets.add(dataSet); if(this.sweetDB.isAutosave()) { this.store(); } } /** * Finds a list of DataSets in the table. * @param predicate * @return */ public List<DataSet> find(Predicate<? super DataSet> predicate) { return this.dataSets.stream().filter(predicate).collect(Collectors.toCollection(ArrayList::new)); } public Optional<DataSet> findAny(Predicate<? super DataSet> predicate) { return this.dataSets.stream().filter(predicate).findAny(); } public Optional<DataSet> findFirst(Predicate<? super DataSet> predicate) { return this.dataSets.stream().filter(predicate).findFirst(); } /** * Returns a list with all DataSets from the table. * @return */ public List<DataSet> all() { return this.dataSets; } /** * Drops the table. * @return */ public void drop() { if(this.path.exists()) { this.path.delete(); } Iterator<Table> tableIterator = this.sweetDB.getTables().iterator(); while(tableIterator.hasNext()) { if(tableIterator.next().getName().equals(this.getName())) { tableIterator.remove(); break; } } } /** * Parses the data. 
* @param data */ public void parse(String data) { Gson gson = new Gson(); try { JsonObject head = gson.fromJson(data, JsonObject.class); Optional<JsonObject> table = Optional.empty(); if(head.has("table")) { table = Optional.of(head.get("table").getAsJsonObject()); } if(table.get().has("syntax")) { JsonArray syntaxRules = table.get().get("syntax").getAsJsonArray(); syntaxRules.forEach(entry -> { JsonObject syntaxRule = entry.getAsJsonObject(); Optional<DataType> dataType = DataTypes.get(syntaxRule.get("dataType").getAsString()); if(dataType.isPresent()) { this.syntax.add( SyntaxRuleBuilder.create() .fieldName(syntaxRule.get("field").getAsString()) .dataType(dataType.get()) .isUnique(syntaxRule.get("isUnique").getAsBoolean()) .isNullable(syntaxRule.get("isNullable").getAsBoolean()) .isAutoincrement(syntaxRule.get("isAutoincrement").getAsBoolean()) .build() ); } else { throw new IllegalArgumentException(String.format( "\"%s\" is not a valid DataType. Please check the syntax of \"%s\".", syntaxRule.get("dataType").getAsString(), this.getName() )); } }); if(head.has("data") && !(this.syntax.getSyntax().isEmpty())) { JsonArray dataSets = head.get("data").getAsJsonArray(); dataSets.forEach(entry -> { if(!(this.syntax.parseValidation(entry.getAsJsonObject()))) { throw new IllegalArgumentException(String.format( "Invalid syntax in \"%s\" -> %s expected %s. Missing field(s): %s", this.getName(), entry.toString(), this.syntax.getAsString(), String.join(", ", this.syntax.missingFields(entry.getAsJsonObject())) )); } List<Field> fields = entry.getAsJsonObject().entrySet().stream().map(field -> new Field(this.sweetDB, this, field.getKey(), this.syntax.get(field.getKey()).getDataType().parse((field.getValue().isJsonNull() ? null : field.getValue().getAsString())))).collect(Collectors.toList()); this.dataSets.add(new DataSet(fields)); }); } } } catch (Exception exception) { exception.printStackTrace(); //TODO invalid syntax } } /** * Stores the data of the table in the file. 
*/ public void store() { if(this.executorService.isShutdown()) { this.executorService = Executors.newFixedThreadPool(this.sweetDB.getStorageThreads()); } Future<?> task = this.executorService.submit(() -> { JsonObject headData = new JsonObject(); JsonObject tableData = new JsonObject(); JsonArray syntaxData = new JsonArray(); for(Map.Entry<String, SyntaxRule> entry : syntax.getSyntax().entrySet()) { JsonObject syntaxRule = new JsonObject(); syntaxRule.addProperty("field", entry.getKey()); syntaxRule.addProperty("dataType", entry.getValue().getDataType().getName()); syntaxRule.addProperty("isNullable", entry.getValue().isNullable()); syntaxRule.addProperty("isUnique", entry.getValue().isUnique()); syntaxRule.addProperty("isAutoincrement", entry.getValue().isAutoincrement()); syntaxData.add(syntaxRule); } JsonArray data = new JsonArray(); for(DataSet entry : dataSets) { JsonObject dataEntry = new JsonObject(); for(Field field : entry.getFields()) { dataEntry.addProperty(field.getName(), (field.getValue() == null ? null : field.getValue().toString())); } data.add(dataEntry); } tableData.add("syntax", syntaxData); headData.add("table", tableData); headData.add("data", data); try { if(!(path.exists())) { path.createNewFile(); } FileUtils.write(path, headData.toString(), "UTF-8", false); } catch (IOException e) { e.printStackTrace(); //TODO } }); try { task.get(30, TimeUnit.SECONDS); } catch (InterruptedException | ExecutionException e) { e.printStackTrace(); } catch (TimeoutException e) { task.cancel(true); } this.executorService.shutdown(); } }
Added DataSet validation for the Table#insert(DataSet) method
src/main/java/de/SweetCode/SweetDB/Table/Table.java
Added DataSet validation for the Table#insert(DataSet) method
<ide><path>rc/main/java/de/SweetCode/SweetDB/Table/Table.java <ide> * Inserts a DataSet into the table. <ide> * @param dataSet <ide> */ <del> public void insert(DataSet dataSet) { <add> public boolean insert(DataSet dataSet) { <add> <add> if(!(this.syntax.validate(dataSet))) { <add> if(this.sweetDB.isDebugging()) { <add> throw new IllegalArgumentException(String.format( <add> "Invalid insert query.", <add> this.syntax.getAsString() <add> )); <add> } else { <add> return false; <add> } <add> } <add> <ide> this.dataSets.add(dataSet); <ide> <ide> if(this.sweetDB.isAutosave()) { <ide> this.store(); <ide> } <add> <add> return true; <ide> } <ide> <ide> /**
JavaScript
mit
9e3d2015e0979b55a1bc9ee52b3702a91c5fe035
0
kagemusha/ember-phoenix-channel,kagemusha/ember-phoenix-channel
/** * Phoenix Channels JavaScript client * * ## Socket Connection * * A single connection is established to the server and * channels are multiplexed over the connection. * Connect to the server using the `Socket` class: * * ```javascript * let socket = new Socket("/socket", {params: {userToken: "123"}}) * socket.connect() * ``` * * The `Socket` constructor takes the mount point of the socket, * the authentication params, as well as options that can be found in * the Socket docs, such as configuring the `LongPoll` transport, and * heartbeat. * * ## Channels * * Channels are isolated, concurrent processes on the server that * subscribe to topics and broker events between the client and server. * To join a channel, you must provide the topic, and channel params for * authorization. Here's an example chat room example where `"new_msg"` * events are listened for, messages are pushed to the server, and * the channel is joined with ok/error/timeout matches: * * ```javascript * let channel = socket.channel("room:123", {token: roomToken}) * channel.on("new_msg", msg => console.log("Got message", msg) ) * $input.onEnter( e => { * channel.push("new_msg", {body: e.target.val}, 10000) * .receive("ok", (msg) => console.log("created message", msg) ) * .receive("error", (reasons) => console.log("create failed", reasons) ) * .receive("timeout", () => console.log("Networking issue...") ) * }) * * channel.join() * .receive("ok", ({messages}) => console.log("catching up", messages) ) * .receive("error", ({reason}) => console.log("failed join", reason) ) * .receive("timeout", () => console.log("Networking issue. Still waiting...")) *``` * * ## Joining * * Creating a channel with `socket.channel(topic, params)`, binds the params to * `channel.params`, which are sent up on `channel.join()`. * Subsequent rejoins will send up the modified params for * updating authorization params, or passing up last_message_id information. 
* Successful joins receive an "ok" status, while unsuccessful joins * receive "error". * * ## Duplicate Join Subscriptions * * While the client may join any number of topics on any number of channels, * the client may only hold a single subscription for each unique topic at any * given time. When attempting to create a duplicate subscription, * the server will close the existing channel, log a warning, and * spawn a new channel for the topic. The client will have their * `channel.onClose` callbacks fired for the existing channel, and the new * channel join will have its receive hooks processed as normal. * * ## Pushing Messages * * From the previous example, we can see that pushing messages to the server * can be done with `channel.push(eventName, payload)` and we can optionally * receive responses from the push. Additionally, we can use * `receive("timeout", callback)` to abort waiting for our other `receive` hooks * and take action after some period of waiting. The default timeout is 10000ms. * * * ## Socket Hooks * * Lifecycle events of the multiplexed connection can be hooked into via * `socket.onError()` and `socket.onClose()` events, ie: * * ```javascript * socket.onError( () => console.log("there was an error with the connection!") ) * socket.onClose( () => console.log("the connection dropped") ) * ``` * * * ## Channel Hooks * * For each joined channel, you can bind to `onError` and `onClose` events * to monitor the channel lifecycle, ie: * * ```javascript * channel.onError( () => console.log("there was an error!") ) * channel.onClose( () => console.log("the channel has gone away gracefully") ) * ``` * * ### onError hooks * * `onError` hooks are invoked if the socket connection drops, or the channel * crashes on the server. In either case, a channel rejoin is attempted * automatically in an exponential backoff manner. * * ### onClose hooks * * `onClose` hooks are invoked only in two cases. 1) the channel explicitly * closed on the server, or 2). 
The client explicitly closed, by calling * `channel.leave()` * * * ## Presence * * The `Presence` object provides features for syncing presence information * from the server with the client and handling presences joining and leaving. * * ### Syncing state from the server * * To sync presence state from the server, first instantiate an object and * pass your channel in to track lifecycle events: * * ```javascript * let channel = socket.channel("some:topic") * let presence = new Presence(channel) * ``` * * Next, use the `presence.onSync` callback to react to state changes * from the server. For example, to render the list of users every time * the list changes, you could write: * * ```javascript * presence.onSync(() => { * myRenderUsersFunction(presence.list()) * }) * ``` * * ### Listing Presences * * `presence.list` is used to return a list of presence information * based on the local state of metadata. By default, all presence * metadata is returned, but a `listBy` function can be supplied to * allow the client to select which metadata to use for a given presence. * For example, you may have a user online from different devices with * a metadata status of "online", but they have set themselves to "away" * on another device. In this case, the app may choose to use the "away" * status for what appears on the UI. The example below defines a `listBy` * function which prioritizes the first metadata which was registered for * each user. This could be the first tab they opened, or the first device * they came online from: * * ```javascript * let listBy = (id, {metas: [first, ...rest]}) => { * first.count = rest.length + 1 // count of this user's presences * first.id = id * return first * } * let onlineUsers = presence.list(listBy) * ``` * * ### Handling individual presence join and leave events * * The `presence.onJoin` and `presence.onLeave` callbacks can be used to * react to individual presences joining and leaving the app. 
For example: * * ```javascript * let presence = new Presence(channel) * * // detect if user has joined for the 1st time or from another tab/device * presence.onJoin((id, current, newPres) => { * if(!current){ * console.log("user has entered for the first time", newPres) * } else { * console.log("user additional presence", newPres) * } * }) * * // detect if user has left from all tabs/devices, or is still present * presence.onLeave((id, current, leftPres) => { * if(current.metas.length === 0){ * console.log("user has left from all devices", leftPres) * } else { * console.log("user left from a device", leftPres) * } * }) * // receive presence data from server * presence.onSync(() => { * displayUsers(presence.list()) * }) * ``` * @module phoenix */ const globalSelf = typeof self !== "undefined" ? self : null const phxWindow = typeof window !== "undefined" ? window : null const global = globalSelf || phxWindow || this const DEFAULT_VSN = "2.0.0" const SOCKET_STATES = {connecting: 0, open: 1, closing: 2, closed: 3} const DEFAULT_TIMEOUT = 10000 const WS_CLOSE_NORMAL = 1000 const CHANNEL_STATES = { closed: "closed", errored: "errored", joined: "joined", joining: "joining", leaving: "leaving", } const CHANNEL_EVENTS = { close: "phx_close", error: "phx_error", join: "phx_join", reply: "phx_reply", leave: "phx_leave" } const CHANNEL_LIFECYCLE_EVENTS = [ CHANNEL_EVENTS.close, CHANNEL_EVENTS.error, CHANNEL_EVENTS.join, CHANNEL_EVENTS.reply, CHANNEL_EVENTS.leave ] const TRANSPORTS = { longpoll: "longpoll", websocket: "websocket" } // wraps value in closure or returns closure let closure = (value) => { if(typeof value === "function"){ return value } else { let closure = function(){ return value } return closure } } /** * Initializes the Push * @param {Channel} channel - The Channel * @param {string} event - The event, for example `"phx_join"` * @param {Object} payload - The payload, for example `{user_id: 123}` * @param {number} timeout - The push timeout in milliseconds */ 
class Push { constructor(channel, event, payload, timeout){ this.channel = channel this.event = event this.payload = payload || function(){ return {} } this.receivedResp = null this.timeout = timeout this.timeoutTimer = null this.recHooks = [] this.sent = false } /** * * @param {number} timeout */ resend(timeout){ this.timeout = timeout this.reset() this.send() } /** * */ send(){ if(this.hasReceived("timeout")){ return } this.startTimeout() this.sent = true this.channel.socket.push({ topic: this.channel.topic, event: this.event, payload: this.payload(), ref: this.ref, join_ref: this.channel.joinRef() }) } /** * * @param {*} status * @param {*} callback */ receive(status, callback){ if(this.hasReceived(status)){ callback(this.receivedResp.response) } this.recHooks.push({status, callback}) return this } /** * @private */ reset(){ this.cancelRefEvent() this.ref = null this.refEvent = null this.receivedResp = null this.sent = false } /** * @private */ matchReceive({status, response, ref}){ this.recHooks.filter( h => h.status === status ) .forEach( h => h.callback(response) ) } /** * @private */ cancelRefEvent(){ if(!this.refEvent){ return } this.channel.off(this.refEvent) } /** * @private */ cancelTimeout(){ clearTimeout(this.timeoutTimer) this.timeoutTimer = null } /** * @private */ startTimeout(){ if(this.timeoutTimer){ this.cancelTimeout() } this.ref = this.channel.socket.makeRef() this.refEvent = this.channel.replyEventName(this.ref) this.channel.on(this.refEvent, payload => { this.cancelRefEvent() this.cancelTimeout() this.receivedResp = payload this.matchReceive(payload) }) this.timeoutTimer = setTimeout(() => { this.trigger("timeout", {}) }, this.timeout) } /** * @private */ hasReceived(status){ return this.receivedResp && this.receivedResp.status === status } /** * @private */ trigger(status, response){ this.channel.trigger(this.refEvent, {status, response}) } } /** * * @param {string} topic * @param {(Object|function)} params * @param {Socket} socket */ export 
class Channel {
  constructor(topic, params, socket) {
    this.state = CHANNEL_STATES.closed
    this.topic = topic
    this.params = closure(params || {})
    this.socket = socket
    // Event subscriptions registered via on(); bindingRef is a
    // monotonically increasing counter used as the unsubscribe handle.
    this.bindings = []
    this.bindingRef = 0
    this.timeout = this.socket.timeout
    this.joinedOnce = false
    this.joinPush = new Push(this, CHANNEL_EVENTS.join, this.params, this.timeout)
    this.pushBuffer = []
    // Retries the join with backoff while the socket is connected.
    this.rejoinTimer = new Timer(() => {
      if(this.socket.isConnected()){ this.rejoin() }
    }, this.socket.rejoinAfterMs)
    this.socket.onError(() => this.rejoinTimer.reset())
    this.socket.onOpen(() => {
      this.rejoinTimer.reset()
      if(this.isErrored()){ this.rejoin() }
    })
    // Successful join: flush any pushes buffered while joining.
    this.joinPush.receive("ok", () => {
      this.state = CHANNEL_STATES.joined
      this.rejoinTimer.reset()
      this.pushBuffer.forEach( pushEvent => pushEvent.send() )
      this.pushBuffer = []
    })
    this.joinPush.receive("error", () => {
      this.state = CHANNEL_STATES.errored
      if(this.socket.isConnected()){ this.rejoinTimer.scheduleTimeout() }
    })
    this.onClose(() => {
      this.rejoinTimer.reset()
      if(this.socket.hasLogger()) this.socket.log("channel", `close ${this.topic} ${this.joinRef()}`)
      this.state = CHANNEL_STATES.closed
      this.socket.remove(this)
    })
    this.onError(reason => {
      if(this.socket.hasLogger()) this.socket.log("channel", `error ${this.topic}`, reason)
      if(this.isJoining()){ this.joinPush.reset() }
      this.state = CHANNEL_STATES.errored
      if(this.socket.isConnected()){ this.rejoinTimer.scheduleTimeout() }
    })
    // Join timed out: proactively tell the server to leave the stale
    // join attempt, then schedule a rejoin.
    this.joinPush.receive("timeout", () => {
      if(this.socket.hasLogger()) this.socket.log("channel", `timeout ${this.topic} (${this.joinRef()})`, this.joinPush.timeout)
      let leavePush = new Push(this, CHANNEL_EVENTS.leave, closure({}), this.timeout)
      leavePush.send()
      this.state = CHANNEL_STATES.errored
      this.joinPush.reset()
      if(this.socket.isConnected()){ this.rejoinTimer.scheduleTimeout() }
    })
    // Route server replies to the per-ref reply event Push listens on.
    this.on(CHANNEL_EVENTS.reply, (payload, ref) => {
      this.trigger(this.replyEventName(ref), payload)
    })
  }

  /**
   * Join the channel
   * @param {integer} timeout
   * @returns {Push}
   */
  join(timeout = this.timeout){
    if(this.joinedOnce){
      throw new Error(`tried to join multiple times. 'join' can only be called a single time per channel instance`)
    } else {
      this.timeout = timeout
      this.joinedOnce = true
      this.rejoin()
      return this.joinPush
    }
  }

  /**
   * Hook into channel close
   * @param {Function} callback
   */
  // NOTE(review): unlike onError below, this does not return the
  // binding ref from on(), so the listener cannot be removed via off().
  onClose(callback){
    this.on(CHANNEL_EVENTS.close, callback)
  }

  /**
   * Hook into channel errors
   * @param {Function} callback
   */
  onError(callback){
    return this.on(CHANNEL_EVENTS.error, reason => callback(reason))
  }

  /**
   * Subscribes on channel events
   *
   * Subscription returns a ref counter, which can be used later to
   * unsubscribe the exact event listener
   *
   * @example
   * const ref1 = channel.on("event", do_stuff)
   * const ref2 = channel.on("event", do_other_stuff)
   * channel.off("event", ref1)
   * // Since unsubscription, do_stuff won't fire,
   * // while do_other_stuff will keep firing on the "event"
   *
   * @param {string} event
   * @param {Function} callback
   * @returns {integer} ref
   */
  on(event, callback){
    let ref = this.bindingRef++
    this.bindings.push({event, ref, callback})
    return ref
  }

  /**
   * Unsubscribes listeners for the event. When `ref` is omitted, all
   * listeners for the event are removed.
   * @param {string} event
   * @param {integer} ref
   */
  off(event, ref){
    this.bindings = this.bindings.filter((bind) => {
      return !(bind.event === event && (typeof ref === "undefined" || ref === bind.ref))
    })
  }

  /**
   * @private
   */
  canPush(){
    return this.socket.isConnected() && this.isJoined()
  }

  /**
   * @param {string} event
   * @param {Object} payload
   * @param {number} [timeout]
   * @returns {Push}
   */
  push(event, payload, timeout = this.timeout){
    if(!this.joinedOnce){
      throw new Error(`tried to push '${event}' to '${this.topic}' before joining. Use channel.join() before pushing events`)
    }
    let pushEvent = new Push(this, event, function(){ return payload }, timeout)
    if(this.canPush()){
      pushEvent.send()
    } else {
      // Not joined/connected yet: buffer and flush after a successful join.
      pushEvent.startTimeout()
      this.pushBuffer.push(pushEvent)
    }

    return pushEvent
  }

  /** Leaves the channel
   *
   * Unsubscribes from server events, and
   * instructs channel to terminate on server
   *
   * Triggers onClose() hooks
   *
   * To receive leave acknowledgements, use the a `receive`
   * hook to bind to the server ack, ie:
   *
   * @example
   * channel.leave().receive("ok", () => alert("left!") )
   *
   * @param {integer} timeout
   * @returns {Push}
   */
  leave(timeout = this.timeout){
    this.rejoinTimer.reset()
    this.joinPush.cancelTimeout()

    this.state = CHANNEL_STATES.leaving
    let onClose = () => {
      if(this.socket.hasLogger()) this.socket.log("channel", `leave ${this.topic}`)
      this.trigger(CHANNEL_EVENTS.close, "leave")
    }
    let leavePush = new Push(this, CHANNEL_EVENTS.leave, closure({}), timeout)
    leavePush.receive("ok", () => onClose() )
             .receive("timeout", () => onClose() )
    leavePush.send()
    // If we can't reach the server, acknowledge the leave locally.
    if(!this.canPush()){ leavePush.trigger("ok", {}) }

    return leavePush
  }

  /**
   * Overridable message hook
   *
   * Receives all events for specialized message handling
   * before dispatching to the channel callbacks.
   *
   * Must return the payload, modified or unmodified
   * @param {string} event
   * @param {Object} payload
   * @param {integer} ref
   * @returns {Object}
   */
  onMessage(event, payload, ref){ return payload }

  /**
   * @private
   */
  isLifecycleEvent(event) { return CHANNEL_LIFECYCLE_EVENTS.indexOf(event) >= 0 }

  /**
   * @private
   * Drops lifecycle messages carrying a join ref from a previous join.
   */
  isMember(topic, event, payload, joinRef){
    if(this.topic !== topic){ return false }

    if(joinRef && joinRef !== this.joinRef() && this.isLifecycleEvent(event)){
      if (this.socket.hasLogger()) this.socket.log("channel", "dropping outdated message", {topic, event, payload, joinRef})
      return false
    } else {
      return true
    }
  }

  /**
   * @private
   */
  joinRef(){ return this.joinPush.ref }

  /**
   * @private
   */
  sendJoin(timeout){
    this.state = CHANNEL_STATES.joining
    this.joinPush.resend(timeout)
  }

  /**
   * @private
   */
  rejoin(timeout = this.timeout){
    if(this.isLeaving()){ return }
    this.sendJoin(timeout)
  }

  /**
   * @private
   */
  trigger(event, payload, ref, joinRef){
    let handledPayload = this.onMessage(event, payload, ref, joinRef)
    if(payload && !handledPayload){ throw new Error("channel onMessage callbacks must return the payload, modified or unmodified") }

    for (let i = 0; i < this.bindings.length; i++) {
      const bind = this.bindings[i]
      if(bind.event !== event){ continue }
      bind.callback(handledPayload, ref, joinRef || this.joinRef())
    }
  }

  /**
   * @private
   */
  replyEventName(ref){ return `chan_reply_${ref}` }

  /**
   * @private
   */
  isClosed() { return this.state === CHANNEL_STATES.closed }

  /**
   * @private
   */
  isErrored(){ return this.state === CHANNEL_STATES.errored }

  /**
   * @private
   */
  isJoined() { return this.state === CHANNEL_STATES.joined }

  /**
   * @private
   */
  isJoining(){ return this.state === CHANNEL_STATES.joining }

  /**
   * @private
   */
  isLeaving(){ return this.state === CHANNEL_STATES.leaving }
}

/* The default serializer for encoding and decoding messages */
export let Serializer = {
  // Encodes a message as a JSON array: [join_ref, ref, topic, event, payload]
  encode(msg, callback){
    let payload = [msg.join_ref, msg.ref, msg.topic, msg.event, msg.payload]
    return
callback(JSON.stringify(payload))
  },

  // Decodes a JSON array back into the message fields.
  decode(rawPayload, callback){
    let [join_ref, ref, topic, event, payload] = JSON.parse(rawPayload)

    return callback({join_ref, ref, topic, event, payload})
  }
}

/** Initializes the Socket
 *
 *
 * For IE8 support use an ES5-shim (https://github.com/es-shims/es5-shim)
 *
 * @param {string} endPoint - The string WebSocket endpoint, ie, `"ws://example.com/socket"`,
 *                                               `"wss://example.com"`
 *                                               `"/socket"` (inherited host & protocol)
 * @param {Object} [opts] - Optional configuration
 * @param {string} [opts.transport] - The Websocket Transport, for example WebSocket or Phoenix.LongPoll.
 *
 * Defaults to WebSocket with automatic LongPoll fallback.
 * @param {Function} [opts.encode] - The function to encode outgoing messages.
 *
 * Defaults to JSON encoder.
 *
 * @param {Function} [opts.decode] - The function to decode incoming messages.
 *
 * Defaults to JSON:
 *
 * ```javascript
 * (payload, callback) => callback(JSON.parse(payload))
 * ```
 *
 * @param {number} [opts.timeout] - The default timeout in milliseconds to trigger push timeouts.
 *
 * Defaults to `DEFAULT_TIMEOUT`
 * @param {number} [opts.heartbeatIntervalMs] - The millisecond interval to send a heartbeat message
 * @param {number} [opts.reconnectAfterMs] - The optional function that returns the millisecond
 * socket reconnect interval.
 *
 * Defaults to stepped backoff of:
 *
 * ```javascript
 * function(tries){
 *   return [10, 50, 100, 150, 200, 250, 500, 1000, 2000][tries - 1] || 5000
 * }
 * ```
 *
 * @param {number} [opts.rejoinAfterMs] - The optional function that returns the millisecond
 * rejoin interval for individual channels.
 *
 * ```javascript
 * function(tries){
 *   return [1000, 2000, 5000][tries - 1] || 10000
 * }
 * ```
 *
 * @param {Function} [opts.logger] - The optional function for specialized logging, ie:
 *
 * ```javascript
 * function(kind, msg, data) {
 *   console.log(`${kind}: ${msg}`, data)
 * }
 * ```
 *
 * @param {number} [opts.longpollerTimeout] - The maximum timeout of a long poll AJAX request.
*
 * Defaults to 20s (double the server long poll timer).
 *
 * @param {(Object|function)} [opts.params] - The optional params to pass when connecting
 * @param {string} [opts.binaryType] - The binary type to use for binary WebSocket frames.
 *
 * Defaults to "arraybuffer"
 *
 * @param {vsn} [opts.vsn] - The serializer's protocol version to send on connect.
 *
 * Defaults to DEFAULT_VSN.
 */
export class Socket {
  constructor(endPoint, opts = {}){
    this.stateChangeCallbacks = {open: [], close: [], error: [], message: []}
    this.channels = []
    this.sendBuffer = []
    this.ref = 0
    this.timeout = opts.timeout || DEFAULT_TIMEOUT
    this.transport = opts.transport || global.WebSocket || LongPoll
    this.defaultEncoder = Serializer.encode
    this.defaultDecoder = Serializer.decode
    this.closeWasClean = false
    this.unloaded = false
    this.binaryType = opts.binaryType || "arraybuffer"
    // Custom encode/decode is only honored for WebSocket transports;
    // LongPoll always uses the default JSON serializer.
    if(this.transport !== LongPoll){
      this.encode = opts.encode || this.defaultEncoder
      this.decode = opts.decode || this.defaultDecoder
    } else {
      this.encode = this.defaultEncoder
      this.decode = this.defaultDecoder
    }
    // On page unload, close abnormally so reconnect backoff is shortened
    // (see reconnectAfterMs below, which returns 100ms when unloaded).
    if(phxWindow && phxWindow.addEventListener){
      phxWindow.addEventListener("beforeunload", e => {
        if(this.conn){
          this.unloaded = true
          this.abnormalClose("unloaded")
        }
      })
    }
    this.heartbeatIntervalMs = opts.heartbeatIntervalMs || 30000
    this.rejoinAfterMs = (tries) => {
      if(opts.rejoinAfterMs){
        return opts.rejoinAfterMs(tries)
      } else {
        return [1000, 2000, 5000][tries - 1] || 10000
      }
    }
    this.reconnectAfterMs = (tries) => {
      if(this.unloaded){ return 100 }
      if(opts.reconnectAfterMs){
        return opts.reconnectAfterMs(tries)
      } else {
        return [10, 50, 100, 150, 200, 250, 500, 1000, 2000][tries - 1] || 5000
      }
    }
    this.logger = opts.logger || null
    this.longpollerTimeout = opts.longpollerTimeout || 20000
    this.params = closure(opts.params || {})
    this.endPoint = `${endPoint}/${TRANSPORTS.websocket}`
    this.vsn = opts.vsn || DEFAULT_VSN
    this.heartbeatTimer = null
    this.pendingHeartbeatRef = null
    this.reconnectTimer = new Timer(() => {
      this.teardown(() => this.connect())
    }, this.reconnectAfterMs)
  }

  /**
   * Returns the socket protocol
   *
   * @returns {string}
   */
  protocol(){ return location.protocol.match(/^https/) ? "wss" : "ws" }

  /**
   * The fully qualified socket url
   *
   * @returns {string}
   */
  endPointURL(){
    let uri = Ajax.appendParams(
      Ajax.appendParams(this.endPoint, this.params()), {vsn: this.vsn})
    if(uri.charAt(0) !== "/"){ return uri }
    if(uri.charAt(1) === "/"){ return `${this.protocol()}:${uri}` }

    return `${this.protocol()}://${location.host}${uri}`
  }

  /**
   * Disconnects the socket
   *
   * See https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent#Status_codes for valid status codes.
   *
   * @param {Function} callback - Optional callback which is called after socket is disconnected.
   * @param {integer} code - A status code for disconnection (Optional).
   * @param {string} reason - A textual description of the reason to disconnect. (Optional)
   */
  disconnect(callback, code, reason){
    this.closeWasClean = true
    this.reconnectTimer.reset()
    this.teardown(callback, code, reason)
  }

  /**
   *
   * @param {Object} params - The params to send when connecting, for example `{user_id: userToken}`
   *
   * Passing params to connect is deprecated; pass them in the Socket constructor instead:
   * `new Socket("/socket", {params: {user_id: userToken}})`.
   */
  connect(params){
    if(params){
      console && console.log("passing params to connect is deprecated. Instead pass :params to the Socket constructor")
      this.params = closure(params)
    }
    if(this.conn){ return }
    this.closeWasClean = false
    this.conn = new this.transport(this.endPointURL())
    this.conn.binaryType = this.binaryType
    this.conn.timeout = this.longpollerTimeout
    this.conn.onopen = () => this.onConnOpen()
    this.conn.onerror = error => this.onConnError(error)
    this.conn.onmessage = event => this.onConnMessage(event)
    this.conn.onclose = event => this.onConnClose(event)
  }

  /**
   * Logs the message. Override `this.logger` for specialized logging. noops by default
   * @param {string} kind
   * @param {string} msg
   * @param {Object} data
   */
  log(kind, msg, data){ this.logger(kind, msg, data) }

  /**
   * Returns true if a logger has been set on this socket.
   */
  hasLogger(){ return this.logger !== null }

  /**
   * Registers callbacks for connection open events
   *
   * @example socket.onOpen(function(){ console.info("the socket was opened") })
   *
   * @param {Function} callback
   */
  onOpen(callback){ this.stateChangeCallbacks.open.push(callback) }

  /**
   * Registers callbacks for connection close events
   * @param {Function} callback
   */
  onClose(callback){ this.stateChangeCallbacks.close.push(callback) }

  /**
   * Registers callbacks for connection error events
   *
   * @example socket.onError(function(error){ alert("An error occurred") })
   *
   * @param {Function} callback
   */
  onError(callback){ this.stateChangeCallbacks.error.push(callback) }

  /**
   * Registers callbacks for connection message events
   * @param {Function} callback
   */
  onMessage(callback){ this.stateChangeCallbacks.message.push(callback) }

  /**
   * @private
   */
  onConnOpen(){
    if (this.hasLogger()) this.log("transport", `connected to ${this.endPointURL()}`)
    this.unloaded = false
    this.closeWasClean = false
    this.flushSendBuffer()
    this.reconnectTimer.reset()
    this.resetHeartbeat()
    this.stateChangeCallbacks.open.forEach( callback => callback() )
  }

  /**
   * @private
   */
  resetHeartbeat(){
    // Transports such as LongPoll set skipHeartbeat and manage liveness themselves.
    if(this.conn && this.conn.skipHeartbeat){ return }
    this.pendingHeartbeatRef = null
    clearInterval(this.heartbeatTimer)
    this.heartbeatTimer = setInterval(() => this.sendHeartbeat(), this.heartbeatIntervalMs)
  }

  // Closes the current connection (if any), suppressing its onclose
  // callback, then invokes the optional callback.
  teardown(callback, code, reason){
    if(this.conn){
      this.conn.onclose = function(){} // noop
      if(code){ this.conn.close(code, reason || "") } else { this.conn.close() }
      this.conn = null
    }
    callback && callback()
  }

  onConnClose(event){
    if (this.hasLogger()) this.log("transport", "close", event)
    this.triggerChanError()
    clearInterval(this.heartbeatTimer)
    // Only schedule a reconnect for non-clean closes (clean = disconnect()).
    if(!this.closeWasClean){
      this.reconnectTimer.scheduleTimeout()
    }
    this.stateChangeCallbacks.close.forEach( callback => callback(event) )
  }

  /**
   * @private
   */
  onConnError(error){
    if (this.hasLogger()) this.log("transport", error)
    this.triggerChanError()
    this.stateChangeCallbacks.error.forEach( callback => callback(error) )
  }

  /**
   * @private
   */
  triggerChanError(){
    this.channels.forEach( channel => {
      if(!(channel.isErrored() || channel.isLeaving() || channel.isClosed())){
        channel.trigger(CHANNEL_EVENTS.error)
      }
    })
  }

  /**
   * @returns {string}
   */
  connectionState(){
    switch(this.conn && this.conn.readyState){
      case SOCKET_STATES.connecting: return "connecting"
      case SOCKET_STATES.open:       return "open"
      case SOCKET_STATES.closing:    return "closing"
      default:                       return "closed"
    }
  }

  /**
   * @returns {boolean}
   */
  isConnected(){ return this.connectionState() === "open" }

  /**
   * @param {Channel}
   */
  remove(channel){
    this.channels = this.channels.filter(c => c.joinRef() !== channel.joinRef())
  }

  /**
   * Initiates a new channel for the given topic
   *
   * @param {string} topic
   * @param {Object} chanParams - Parameters for the channel
   * @returns {Channel}
   */
  channel(topic, chanParams = {}){
    let chan = new Channel(topic, chanParams, this)
    this.channels.push(chan)
    return chan
  }

  /**
   * @param {Object} data
   */
  push(data){
    if (this.hasLogger()) {
      let {topic, event, payload, ref, join_ref} = data
      this.log("push", `${topic} ${event} (${join_ref}, ${ref})`, payload)
    }
    // Buffer pushes made while disconnected; flushed in onConnOpen().
    if(this.isConnected()){
      this.encode(data, result => this.conn.send(result))
    } else {
      this.sendBuffer.push(() => this.encode(data, result => this.conn.send(result)))
    }
  }

  /**
   * Return the next message ref, accounting for overflows
   * @returns {string}
   */
  makeRef(){
    let newRef = this.ref + 1
    if(newRef === this.ref){ this.ref = 0 } else { this.ref = newRef }

    return this.ref.toString()
  }

  // Sends a heartbeat; if the previous heartbeat was never acknowledged
  // (pendingHeartbeatRef still set), treats the connection as dead.
  sendHeartbeat(){
    if(!this.isConnected()){ return }
    if(this.pendingHeartbeatRef){
      this.pendingHeartbeatRef = null
      if (this.hasLogger()) this.log("transport", "heartbeat timeout. Attempting to re-establish connection")
      this.abnormalClose("heartbeat timeout")
      return
    }
    this.pendingHeartbeatRef = this.makeRef()
    this.push({topic: "phoenix", event: "heartbeat", payload: {}, ref: this.pendingHeartbeatRef})
  }

  abnormalClose(reason){
    this.closeWasClean = false
    this.conn.close(WS_CLOSE_NORMAL, reason)
  }

  flushSendBuffer(){
    if(this.isConnected() && this.sendBuffer.length > 0){
      this.sendBuffer.forEach( callback => callback() )
      this.sendBuffer = []
    }
  }

  onConnMessage(rawMessage){
    this.decode(rawMessage.data, msg => {
      let {topic, event, payload, ref, join_ref} = msg
      if(ref && ref === this.pendingHeartbeatRef){ this.pendingHeartbeatRef = null }

      if (this.hasLogger()) this.log("receive", `${payload.status || ""} ${topic} ${event} ${ref && "(" + ref + ")" || ""}`, payload)

      for (let i = 0; i < this.channels.length; i++) {
        const channel = this.channels[i]
        if(!channel.isMember(topic, event, payload, join_ref)){ continue }
        channel.trigger(event, payload, ref, join_ref)
      }

      for (let i = 0; i < this.stateChangeCallbacks.message.length; i++) {
        this.stateChangeCallbacks.message[i](msg)
      }
    })
  }
}

export class LongPoll {

  constructor(endPoint){
    this.endPoint = null
    this.token = null
    // LongPoll manages its own liveness, so Socket skips heartbeats.
    this.skipHeartbeat = true
    this.onopen = function(){} // noop
    this.onerror = function(){} // noop
    this.onmessage = function(){} // noop
    this.onclose = function(){} // noop
    this.pollEndpoint = this.normalizeEndpoint(endPoint)
    this.readyState = SOCKET_STATES.connecting

    this.poll()
  }

  normalizeEndpoint(endPoint){
    return(endPoint
      .replace("ws://", "http://")
      .replace("wss://", "https://")
      .replace(new RegExp("(.*)\/" + TRANSPORTS.websocket), "$1/" + TRANSPORTS.longpoll))
  }

  endpointURL(){
    return Ajax.appendParams(this.pollEndpoint, {token: this.token})
  }

  closeAndRetry(){
    this.close()
    this.readyState = SOCKET_STATES.connecting
  }

  ontimeout(){
    this.onerror("timeout")
    this.closeAndRetry()
  }

  poll(){
    if(!(this.readyState === SOCKET_STATES.open || this.readyState === SOCKET_STATES.connecting)){ return }
Ajax.request("GET", this.endpointURL(), "application/json", null, this.timeout, this.ontimeout.bind(this), (resp) => {
      if(resp){
        var {status, token, messages} = resp
        this.token = token
      } else{
        var status = 0
      }

      // Server-defined long poll statuses:
      //   200 - messages available, deliver and poll again
      //   204 - no messages, poll again
      //   410 - session established; mark open and keep polling
      //   0/500 - error, close and retry
      switch(status){
        case 200:
          messages.forEach(msg => this.onmessage({data: msg}))
          this.poll()
          break
        case 204:
          this.poll()
          break
        case 410:
          this.readyState = SOCKET_STATES.open
          this.onopen()
          this.poll()
          break
        case 0:
        case 500:
          this.onerror()
          this.closeAndRetry()
          break
        default: throw new Error(`unhandled poll status ${status}`)
      }
    })
  }

  send(body){
    Ajax.request("POST", this.endpointURL(), "application/json", body, this.timeout, this.onerror.bind(this, "timeout"), (resp) => {
      if(!resp || resp.status !== 200){
        this.onerror(resp && resp.status)
        this.closeAndRetry()
      }
    })
  }

  close(code, reason){
    this.readyState = SOCKET_STATES.closed
    this.onclose()
  }
}

export class Ajax {

  // Dispatches to the XDomainRequest path on legacy IE, otherwise XHR.
  static request(method, endPoint, accept, body, timeout, ontimeout, callback){
    if(global.XDomainRequest){
      let req = new XDomainRequest() // IE8, IE9
      this.xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback)
    } else {
      let req = global.XMLHttpRequest ?
        new global.XMLHttpRequest() : // IE7+, Firefox, Chrome, Opera, Safari
        new ActiveXObject("Microsoft.XMLHTTP") // IE6, IE5
      this.xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback)
    }
  }

  static xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback){
    req.timeout = timeout
    req.open(method, endPoint)
    req.onload = () => {
      let response = this.parseJSON(req.responseText)
      callback && callback(response)
    }
    if(ontimeout){ req.ontimeout = ontimeout }

    // Work around bug in IE9 that requires an attached onprogress handler
    req.onprogress = () => {}

    req.send(body)
  }

  static xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback){
    req.open(method, endPoint, true)
    req.timeout = timeout
    req.setRequestHeader("Content-Type", accept)
    req.onerror = () => { callback && callback(null) }
    req.onreadystatechange = () => {
      if(req.readyState === this.states.complete && callback){
        let response = this.parseJSON(req.responseText)
        callback(response)
      }
    }
    if(ontimeout){ req.ontimeout = ontimeout }

    req.send(body)
  }

  // Returns the parsed object, or null for empty or malformed responses.
  static parseJSON(resp){
    if(!resp || resp === ""){ return null }

    try {
      return JSON.parse(resp)
    } catch(e) {
      console && console.log("failed to parse JSON response", resp)
      return null
    }
  }

  // Serializes a (possibly nested) object into a query string,
  // nesting keys as parent[child]=value.
  static serialize(obj, parentKey){
    let queryStr = []
    for(var key in obj){
      if(!obj.hasOwnProperty(key)){ continue }
      let paramKey = parentKey ? `${parentKey}[${key}]` : key
      let paramVal = obj[key]
      if(typeof paramVal === "object"){
        queryStr.push(this.serialize(paramVal, paramKey))
      } else {
        queryStr.push(encodeURIComponent(paramKey) + "=" + encodeURIComponent(paramVal))
      }
    }
    return queryStr.join("&")
  }

  static appendParams(url, params){
    if(Object.keys(params).length === 0){ return url }

    let prefix = url.match(/\?/) ? "&" : "?"
return `${url}${prefix}${this.serialize(params)}`
  }
}

Ajax.states = {complete: 4}

/**
 * Initializes the Presence
 * @param {Channel} channel - The Channel
 * @param {Object} opts - The options,
 *        for example `{events: {state: "state", diff: "diff"}}`
 */
export class Presence {

  constructor(channel, opts = {}){
    let events = opts.events || {state: "presence_state", diff: "presence_diff"}
    this.state = {}
    this.pendingDiffs = []
    this.channel = channel
    this.joinRef = null
    this.caller = {
      onJoin: function(){},
      onLeave: function(){},
      onSync: function(){}
    }

    // Full state from server: sync it, then replay any diffs that
    // arrived while we were waiting for the state message.
    this.channel.on(events.state, newState => {
      let {onJoin, onLeave, onSync} = this.caller

      this.joinRef = this.channel.joinRef()
      this.state = Presence.syncState(this.state, newState, onJoin, onLeave)

      this.pendingDiffs.forEach(diff => {
        this.state = Presence.syncDiff(this.state, diff, onJoin, onLeave)
      })
      this.pendingDiffs = []
      onSync()
    })

    this.channel.on(events.diff, diff => {
      let {onJoin, onLeave, onSync} = this.caller

      if(this.inPendingSyncState()){
        // Buffer diffs until the initial state arrives for this join.
        this.pendingDiffs.push(diff)
      } else {
        this.state = Presence.syncDiff(this.state, diff, onJoin, onLeave)
        onSync()
      }
    })
  }

  onJoin(callback){ this.caller.onJoin = callback }

  onLeave(callback){ this.caller.onLeave = callback }

  onSync(callback){ this.caller.onSync = callback }

  list(by){ return Presence.list(this.state, by) }

  inPendingSyncState(){
    return !this.joinRef || (this.joinRef !== this.channel.joinRef())
  }

  // lower-level public static API

  /**
   * Used to sync the list of presences on the server
   * with the client's state. An optional `onJoin` and `onLeave` callback can
   * be provided to react to changes in the client's local presences across
   * disconnects and reconnects with the server.
   *
   * @returns {Presence}
   */
  static syncState(currentState, newState, onJoin, onLeave){
    let state = this.clone(currentState)
    let joins = {}
    let leaves = {}

    // Anything in the old state but missing from the new state has left.
    this.map(state, (key, presence) => {
      if(!newState[key]){
        leaves[key] = presence
      }
    })
    // Diff the metas per key to find newly joined and departed metas.
    this.map(newState, (key, newPresence) => {
      let currentPresence = state[key]
      if(currentPresence){
        let newRefs = newPresence.metas.map(m => m.phx_ref)
        let curRefs = currentPresence.metas.map(m => m.phx_ref)
        let joinedMetas = newPresence.metas.filter(m => curRefs.indexOf(m.phx_ref) < 0)
        let leftMetas = currentPresence.metas.filter(m => newRefs.indexOf(m.phx_ref) < 0)
        if(joinedMetas.length > 0){
          joins[key] = newPresence
          joins[key].metas = joinedMetas
        }
        if(leftMetas.length > 0){
          leaves[key] = this.clone(currentPresence)
          leaves[key].metas = leftMetas
        }
      } else {
        joins[key] = newPresence
      }
    })
    return this.syncDiff(state, {joins: joins, leaves: leaves}, onJoin, onLeave)
  }

  /**
   *
   * Used to sync a diff of presence join and leave
   * events from the server, as they happen. Like `syncState`, `syncDiff`
   * accepts optional `onJoin` and `onLeave` callbacks to react to a user
   * joining or leaving from a device.
   *
   * @returns {Presence}
   */
  static syncDiff(currentState, {joins, leaves}, onJoin, onLeave){
    let state = this.clone(currentState)
    if(!onJoin){ onJoin = function(){} }
    if(!onLeave){ onLeave = function(){} }

    this.map(joins, (key, newPresence) => {
      let currentPresence = state[key]
      state[key] = newPresence
      // Merge: keep existing metas (minus any replaced by the join)
      // ahead of... actually prepended before the joined metas.
      if(currentPresence){
        let joinedRefs = state[key].metas.map(m => m.phx_ref)
        let curMetas = currentPresence.metas.filter(m => joinedRefs.indexOf(m.phx_ref) < 0)
        state[key].metas.unshift(...curMetas)
      }
      onJoin(key, currentPresence, newPresence)
    })
    this.map(leaves, (key, leftPresence) => {
      let currentPresence = state[key]
      if(!currentPresence){ return }
      // Drop only the metas whose refs appear in the leave event; the
      // key is removed entirely once no metas remain.
      let refsToRemove = leftPresence.metas.map(m => m.phx_ref)
      currentPresence.metas = currentPresence.metas.filter(p => {
        return refsToRemove.indexOf(p.phx_ref) < 0
      })
      onLeave(key, currentPresence, leftPresence)
      if(currentPresence.metas.length === 0){
        delete state[key]
      }
    })
    return state
  }

  /**
   * Returns the array of presences, with selected metadata.
   *
   * @param {Object} presences
   * @param {Function} chooser
   *
   * @returns {Presence}
   */
  static list(presences, chooser){
    if(!chooser){ chooser = function(key, pres){ return pres } }

    return this.map(presences, (key, presence) => {
      return chooser(key, presence)
    })
  }

  // private

  static map(obj, func){
    return Object.getOwnPropertyNames(obj).map(key => func(key, obj[key]))
  }

  // Deep copy via JSON round-trip; assumes presence state is plain,
  // JSON-serializable data.
  static clone(obj){ return JSON.parse(JSON.stringify(obj)) }
}

/**
 *
 * Creates a timer that accepts a `timerCalc` function to perform
 * calculated timeout retries, such as exponential backoff.
 *
 * @example
 * let reconnectTimer = new Timer(() => this.connect(), function(tries){
 *   return [1000, 5000, 10000][tries - 1] || 10000
 * })
 * reconnectTimer.scheduleTimeout() // fires after 1000
 * reconnectTimer.scheduleTimeout() // fires after 5000
 * reconnectTimer.reset()
 * reconnectTimer.scheduleTimeout() // fires after 1000
 *
 * @param {Function} callback
 * @param {Function} timerCalc
 */
class Timer {
  constructor(callback, timerCalc){
    this.callback = callback
    this.timerCalc = timerCalc
    this.timer = null
    this.tries = 0
  }

  reset(){
    this.tries = 0
    clearTimeout(this.timer)
  }

  /**
   * Cancels any previous scheduleTimeout and schedules callback
   */
  scheduleTimeout(){
    clearTimeout(this.timer)

    this.timer = setTimeout(() => {
      this.tries = this.tries + 1
      this.callback()
    }, this.timerCalc(this.tries + 1))
  }
}
// addon/phoenix.js
// Phoenix Channels JavaScript client // // ## Socket Connection // // A single connection is established to the server and // channels are multiplexed over the connection. // Connect to the server using the `Socket` class: // // let socket = new Socket("/ws", {params: {userToken: "123"}}) // socket.connect() // // The `Socket` constructor takes the mount point of the socket, // the authentication params, as well as options that can be found in // the Socket docs, such as configuring the `LongPoll` transport, and // heartbeat. // // ## Channels // // Channels are isolated, concurrent processes on the server that // subscribe to topics and broker events between the client and server. // To join a channel, you must provide the topic, and channel params for // authorization. Here's an example chat room example where `"new_msg"` // events are listened for, messages are pushed to the server, and // the channel is joined with ok/error/timeout matches: // // let channel = socket.channel("room:123", {token: roomToken}) // channel.on("new_msg", msg => console.log("Got message", msg) ) // $input.onEnter( e => { // channel.push("new_msg", {body: e.target.val}, 10000) // .receive("ok", (msg) => console.log("created message", msg) ) // .receive("error", (reasons) => console.log("create failed", reasons) ) // .receive("timeout", () => console.log("Networking issue...") ) // }) // channel.join() // .receive("ok", ({messages}) => console.log("catching up", messages) ) // .receive("error", ({reason}) => console.log("failed join", reason) ) // .receive("timeout", () => console.log("Networking issue. Still waiting...") ) // // // ## Joining // // Creating a channel with `socket.channel(topic, params)`, binds the params to // `channel.params`, which are sent up on `channel.join()`. // Subsequent rejoins will send up the modified params for // updating authorization params, or passing up last_message_id information. 
// Successful joins receive an "ok" status, while unsuccessful joins // receive "error". // // ## Duplicate Join Subscriptions // // While the client may join any number of topics on any number of channels, // the client may only hold a single subscription for each unique topic at any // given time. When attempting to create a duplicate subscription, // the server will close the existing channel, log a warning, and // spawn a new channel for the topic. The client will have their // `channel.onClose` callbacks fired for the existing channel, and the new // channel join will have its receive hooks processed as normal. // // ## Pushing Messages // // From the previous example, we can see that pushing messages to the server // can be done with `channel.push(eventName, payload)` and we can optionally // receive responses from the push. Additionally, we can use // `receive("timeout", callback)` to abort waiting for our other `receive` hooks // and take action after some period of waiting. The default timeout is 5000ms. // // // ## Socket Hooks // // Lifecycle events of the multiplexed connection can be hooked into via // `socket.onError()` and `socket.onClose()` events, ie: // // socket.onError( () => console.log("there was an error with the connection!") ) // socket.onClose( () => console.log("the connection dropped") ) // // // ## Channel Hooks // // For each joined channel, you can bind to `onError` and `onClose` events // to monitor the channel lifecycle, ie: // // channel.onError( () => console.log("there was an error!") ) // channel.onClose( () => console.log("the channel has gone away gracefully") ) // // ### onError hooks // // `onError` hooks are invoked if the socket connection drops, or the channel // crashes on the server. In either case, a channel rejoin is attempted // automatically in an exponential backoff manner. // // ### onClose hooks // // `onClose` hooks are invoked only in two cases. 1) the channel explicitly // closed on the server, or 2). 
The client explicitly closed, by calling // `channel.leave()` // // // ## Presence // // The `Presence` object provides features for syncing presence information // from the server with the client and handling presences joining and leaving. // // ### Syncing initial state from the server // // `Presence.syncState` is used to sync the list of presences on the server // with the client's state. An optional `onJoin` and `onLeave` callback can // be provided to react to changes in the client's local presences across // disconnects and reconnects with the server. // // `Presence.syncDiff` is used to sync a diff of presence join and leave // events from the server, as they happen. Like `syncState`, `syncDiff` // accepts optional `onJoin` and `onLeave` callbacks to react to a user // joining or leaving from a device. // // ### Listing Presences // // `Presence.list` is used to return a list of presence information // based on the local state of metadata. By default, all presence // metadata is returned, but a `listBy` function can be supplied to // allow the client to select which metadata to use for a given presence. // For example, you may have a user online from different devices with a // a metadata status of "online", but they have set themselves to "away" // on another device. In this case, they app may choose to use the "away" // status for what appears on the UI. The example below defines a `listBy` // function which prioritizes the first metadata which was registered for // each user. 
This could be the first tab they opened, or the first device // they came online from: // // let state = {} // Presence.syncState(state, stateFromServer) // let listBy = (id, {metas: [first, ...rest]}) => { // first.count = rest.length + 1 // count of this user's presences // first.id = id // return first // } // let onlineUsers = Presence.list(state, listBy) // // // ### Example Usage // // // detect if user has joined for the 1st time or from another tab/device // let onJoin = (id, current, newPres) => { // if(!current){ // console.log("user has entered for the first time", newPres) // } else { // console.log("user additional presence", newPres) // } // } // // detect if user has left from all tabs/devices, or is still present // let onLeave = (id, current, leftPres) => { // if(current.metas.length === 0){ // console.log("user has left from all devices", leftPres) // } else { // console.log("user left from a device", leftPres) // } // } // let presences = {} // client's initial empty presence state // // receive initial presence data from server, sent after join // myChannel.on("presences", state => { // Presence.syncState(presences, state, onJoin, onLeave) // displayUsers(Presence.list(presences)) // }) // // receive "presence_diff" from server, containing join/leave events // myChannel.on("presence_diff", diff => { // Presence.syncDiff(presences, diff, onJoin, onLeave) // this.setState({users: Presence.list(room.presences, listBy)}) // }) // const VSN = "1.0.0" const SOCKET_STATES = {connecting: 0, open: 1, closing: 2, closed: 3} const DEFAULT_TIMEOUT = 10000 const CHANNEL_STATES = { closed: "closed", errored: "errored", joined: "joined", joining: "joining", leaving: "leaving", } const CHANNEL_EVENTS = { close: "phx_close", error: "phx_error", join: "phx_join", reply: "phx_reply", leave: "phx_leave" } const TRANSPORTS = { longpoll: "longpoll", websocket: "websocket" } class Push { // Initializes the Push // // channel - The Channel // event - The event, for 
example `"phx_join"` // payload - The payload, for example `{user_id: 123}` // timeout - The push timeout in milliseconds // constructor(channel, event, payload, timeout){ this.channel = channel this.event = event this.payload = payload || {} this.receivedResp = null this.timeout = timeout this.timeoutTimer = null this.recHooks = [] this.sent = false } resend(timeout){ this.timeout = timeout this.cancelRefEvent() this.ref = null this.refEvent = null this.receivedResp = null this.sent = false this.send() } send(){ if(this.hasReceived("timeout")){ return } this.startTimeout() this.sent = true this.channel.socket.push({ topic: this.channel.topic, event: this.event, payload: this.payload, ref: this.ref }) } receive(status, callback){ if(this.hasReceived(status)){ callback(this.receivedResp.response) } this.recHooks.push({status, callback}) return this } // private matchReceive({status, response, ref}){ this.recHooks.filter( h => h.status === status ) .forEach( h => h.callback(response) ) } cancelRefEvent(){ if(!this.refEvent){ return } this.channel.off(this.refEvent) } cancelTimeout(){ clearTimeout(this.timeoutTimer) this.timeoutTimer = null } startTimeout(){ if(this.timeoutTimer){ return } this.ref = this.channel.socket.makeRef() this.refEvent = this.channel.replyEventName(this.ref) this.channel.on(this.refEvent, payload => { this.cancelRefEvent() this.cancelTimeout() this.receivedResp = payload this.matchReceive(payload) }) this.timeoutTimer = setTimeout(() => { this.trigger("timeout", {}) }, this.timeout) } hasReceived(status){ return this.receivedResp && this.receivedResp.status === status } trigger(status, response){ this.channel.trigger(this.refEvent, {status, response}) } } export class Channel { constructor(topic, params, socket) { this.state = CHANNEL_STATES.closed this.topic = topic this.params = params || {} this.socket = socket this.bindings = [] this.timeout = this.socket.timeout this.joinedOnce = false this.joinPush = new Push(this, CHANNEL_EVENTS.join, 
this.params, this.timeout) this.pushBuffer = [] this.rejoinTimer = new Timer( () => this.rejoinUntilConnected(), this.socket.reconnectAfterMs ) this.joinPush.receive("ok", () => { this.state = CHANNEL_STATES.joined this.rejoinTimer.reset() this.pushBuffer.forEach( pushEvent => pushEvent.send() ) this.pushBuffer = [] }) this.onClose( () => { this.socket.log("channel", `close ${this.topic} ${this.joinRef()}`) this.state = CHANNEL_STATES.closed this.socket.remove(this) }) this.onError( reason => { this.socket.log("channel", `error ${this.topic}`, reason) this.state = CHANNEL_STATES.errored this.rejoinTimer.scheduleTimeout() }) this.joinPush.receive("timeout", () => { if(this.state !== CHANNEL_STATES.joining){ return } this.socket.log("channel", `timeout ${this.topic}`, this.joinPush.timeout) this.state = CHANNEL_STATES.errored this.rejoinTimer.scheduleTimeout() }) this.on(CHANNEL_EVENTS.reply, (payload, ref) => { this.trigger(this.replyEventName(ref), payload) }) } rejoinUntilConnected(){ this.rejoinTimer.scheduleTimeout() if(this.socket.isConnected()){ this.rejoin() } } join(timeout = this.timeout){ if(this.joinedOnce){ throw(`tried to join multiple times. 'join' can only be called a single time per channel instance`) } else { this.joinedOnce = true this.rejoin(timeout) return this.joinPush } } onClose(callback){ this.on(CHANNEL_EVENTS.close, callback) } onError(callback){ this.on(CHANNEL_EVENTS.error, reason => callback(reason) ) } on(event, callback){ this.bindings.push({event, callback}) } off(event){ this.bindings = this.bindings.filter( bind => bind.event !== event ) } canPush(){ return this.socket.isConnected() && this.state === CHANNEL_STATES.joined } push(event, payload, timeout = this.timeout){ if(!this.joinedOnce){ throw(`tried to push '${event}' to '${this.topic}' before joining. 
Use channel.join() before pushing events`) } let pushEvent = new Push(this, event, payload, timeout) if(this.canPush()){ pushEvent.send() } else { pushEvent.startTimeout() this.pushBuffer.push(pushEvent) } return pushEvent } // Leaves the channel // // Unsubscribes from server events, and // instructs channel to terminate on server // // Triggers onClose() hooks // // To receive leave acknowledgements, use the a `receive` // hook to bind to the server ack, ie: // // channel.leave().receive("ok", () => alert("left!") ) // leave(timeout = this.timeout){ this.state = CHANNEL_STATES.leaving let onClose = () => { this.socket.log("channel", `leave ${this.topic}`) this.trigger(CHANNEL_EVENTS.close, "leave", this.joinRef()) } let leavePush = new Push(this, CHANNEL_EVENTS.leave, {}, timeout) leavePush.receive("ok", () => onClose() ) .receive("timeout", () => onClose() ) leavePush.send() if(!this.canPush()){ leavePush.trigger("ok", {}) } return leavePush } // Overridable message hook // // Receives all events for specialized message handling onMessage(event, payload, ref){} // private isMember(topic){ return this.topic === topic } joinRef(){ return this.joinPush.ref } sendJoin(timeout){ this.state = CHANNEL_STATES.joining this.joinPush.resend(timeout) } rejoin(timeout = this.timeout){ if(this.state === CHANNEL_STATES.leaving){ return } this.sendJoin(timeout) } trigger(event, payload, ref){ let {close, error, leave, join} = CHANNEL_EVENTS if(ref && [close, error, leave, join].indexOf(event) >= 0 && ref !== this.joinRef()){ return } this.onMessage(event, payload, ref) this.bindings.filter( bind => bind.event === event) .map( bind => bind.callback(payload, ref)) } replyEventName(ref){ return `chan_reply_${ref}` } } export class Socket { // Initializes the Socket // // endPoint - The string WebSocket endpoint, ie, "ws://example.com/ws", // "wss://example.com" // "/ws" (inherited host & protocol) // opts - Optional configuration // transport - The Websocket Transport, for example 
WebSocket or Phoenix.LongPoll. // Defaults to WebSocket with automatic LongPoll fallback. // timeout - The default timeout in milliseconds to trigger push timeouts. // Defaults `DEFAULT_TIMEOUT` // heartbeatIntervalMs - The millisec interval to send a heartbeat message // reconnectAfterMs - The optional function that returns the millsec // reconnect interval. Defaults to stepped backoff of: // // function(tries){ // return [1000, 5000, 10000][tries - 1] || 10000 // } // // logger - The optional function for specialized logging, ie: // `logger: (kind, msg, data) => { console.log(`${kind}: ${msg}`, data) } // // longpollerTimeout - The maximum timeout of a long poll AJAX request. // Defaults to 20s (double the server long poll timer). // // params - The optional params to pass when connecting // // For IE8 support use an ES5-shim (https://github.com/es-shims/es5-shim) // constructor(endPoint, opts = {}){ this.stateChangeCallbacks = {open: [], close: [], error: [], message: []} this.channels = [] this.sendBuffer = [] this.ref = 0 this.timeout = opts.timeout || DEFAULT_TIMEOUT this.transport = opts.transport || window.WebSocket || LongPoll this.heartbeatIntervalMs = opts.heartbeatIntervalMs || 30000 this.reconnectAfterMs = opts.reconnectAfterMs || function(tries){ return [1000, 2000, 5000, 10000][tries - 1] || 10000 } this.logger = opts.logger || function(){} // noop this.longpollerTimeout = opts.longpollerTimeout || 20000 this.params = opts.params || {} this.endPoint = `${endPoint}/${TRANSPORTS.websocket}` this.reconnectTimer = new Timer(() => { this.disconnect(() => this.connect()) }, this.reconnectAfterMs) } protocol(){ return location.protocol.match(/^https/) ? 
"wss" : "ws" } endPointURL(){ let uri = Ajax.appendParams( Ajax.appendParams(this.endPoint, this.params), {vsn: VSN}) if(uri.charAt(0) !== "/"){ return uri } if(uri.charAt(1) === "/"){ return `${this.protocol()}:${uri}` } return `${this.protocol()}://${location.host}${uri}` } disconnect(callback, code, reason){ if(this.conn){ this.conn.onclose = function(){} // noop if(code){ this.conn.close(code, reason || "") } else { this.conn.close() } this.conn = null } callback && callback() } // params - The params to send when connecting, for example `{user_id: userToken}` connect(params){ if(params){ console && console.log("passing params to connect is deprecated. Instead pass :params to the Socket constructor") this.params = params } if(this.conn){ return } this.conn = new this.transport(this.endPointURL()) this.conn.timeout = this.longpollerTimeout this.conn.onopen = () => this.onConnOpen() this.conn.onerror = error => this.onConnError(error) this.conn.onmessage = event => this.onConnMessage(event) this.conn.onclose = event => this.onConnClose(event) } // Logs the message. Override `this.logger` for specialized logging. 
noops by default log(kind, msg, data){ this.logger(kind, msg, data) } // Registers callbacks for connection state change events // // Examples // // socket.onError(function(error){ alert("An error occurred") }) // onOpen (callback){ this.stateChangeCallbacks.open.push(callback) } onClose (callback){ this.stateChangeCallbacks.close.push(callback) } onError (callback){ this.stateChangeCallbacks.error.push(callback) } onMessage (callback){ this.stateChangeCallbacks.message.push(callback) } onConnOpen(){ this.log("transport", `connected to ${this.endPointURL()}`, this.transport.prototype) this.flushSendBuffer() this.reconnectTimer.reset() if(!this.conn.skipHeartbeat){ clearInterval(this.heartbeatTimer) this.heartbeatTimer = setInterval(() => this.sendHeartbeat(), this.heartbeatIntervalMs) } this.stateChangeCallbacks.open.forEach( callback => callback() ) } onConnClose(event){ this.log("transport", "close", event) this.triggerChanError() clearInterval(this.heartbeatTimer) this.reconnectTimer.scheduleTimeout() this.stateChangeCallbacks.close.forEach( callback => callback(event) ) } onConnError(error){ this.log("transport", error) this.triggerChanError() this.stateChangeCallbacks.error.forEach( callback => callback(error) ) } triggerChanError(){ this.channels.forEach( channel => channel.trigger(CHANNEL_EVENTS.error) ) } connectionState(){ switch(this.conn && this.conn.readyState){ case SOCKET_STATES.connecting: return "connecting" case SOCKET_STATES.open: return "open" case SOCKET_STATES.closing: return "closing" default: return "closed" } } isConnected(){ return this.connectionState() === "open" } remove(channel){ this.channels = this.channels.filter(c => c.joinRef() !== channel.joinRef()) } channel(topic, chanParams = {}){ let chan = new Channel(topic, chanParams, this) this.channels.push(chan) return chan } push(data){ let {topic, event, payload, ref} = data let callback = () => this.conn.send(JSON.stringify(data)) this.log("push", `${topic} ${event} (${ref})`, 
payload) if(this.isConnected()){ callback() } else { this.sendBuffer.push(callback) } } // Return the next message ref, accounting for overflows makeRef(){ let newRef = this.ref + 1 if(newRef === this.ref){ this.ref = 0 } else { this.ref = newRef } return this.ref.toString() } sendHeartbeat(){ if(!this.isConnected()){ return } this.push({topic: "phoenix", event: "heartbeat", payload: {}, ref: this.makeRef()}) } flushSendBuffer(){ if(this.isConnected() && this.sendBuffer.length > 0){ this.sendBuffer.forEach( callback => callback() ) this.sendBuffer = [] } } onConnMessage(rawMessage){ let msg = JSON.parse(rawMessage.data) let {topic, event, payload, ref} = msg this.log("receive", `${payload.status || ""} ${topic} ${event} ${ref && "(" + ref + ")" || ""}`, payload) this.channels.filter( channel => channel.isMember(topic) ) .forEach( channel => channel.trigger(event, payload, ref) ) this.stateChangeCallbacks.message.forEach( callback => callback(msg) ) } } export class LongPoll { constructor(endPoint){ this.endPoint = null this.token = null this.skipHeartbeat = true this.onopen = function(){} // noop this.onerror = function(){} // noop this.onmessage = function(){} // noop this.onclose = function(){} // noop this.pollEndpoint = this.normalizeEndpoint(endPoint) this.readyState = SOCKET_STATES.connecting this.poll() } normalizeEndpoint(endPoint){ return(endPoint .replace("ws://", "http://") .replace("wss://", "https://") .replace(new RegExp("(.*)\/" + TRANSPORTS.websocket), "$1/" + TRANSPORTS.longpoll)) } endpointURL(){ return Ajax.appendParams(this.pollEndpoint, {token: this.token}) } closeAndRetry(){ this.close() this.readyState = SOCKET_STATES.connecting } ontimeout(){ this.onerror("timeout") this.closeAndRetry() } poll(){ if(!(this.readyState === SOCKET_STATES.open || this.readyState === SOCKET_STATES.connecting)){ return } Ajax.request("GET", this.endpointURL(), "application/json", null, this.timeout, this.ontimeout.bind(this), (resp) => { if(resp){ var {status, 
token, messages} = resp this.token = token } else{ var status = 0 } switch(status){ case 200: messages.forEach( msg => this.onmessage({data: JSON.stringify(msg)}) ) this.poll() break case 204: this.poll() break case 410: this.readyState = SOCKET_STATES.open this.onopen() this.poll() break case 0: case 500: this.onerror() this.closeAndRetry() break default: throw(`unhandled poll status ${status}`) } }) } send(body){ Ajax.request("POST", this.endpointURL(), "application/json", body, this.timeout, this.onerror.bind(this, "timeout"), (resp) => { if(!resp || resp.status !== 200){ this.onerror(status) this.closeAndRetry() } }) } close(code, reason){ this.readyState = SOCKET_STATES.closed this.onclose() } } export class Ajax { static request(method, endPoint, accept, body, timeout, ontimeout, callback){ if(window.XDomainRequest){ let req = new XDomainRequest() // IE8, IE9 this.xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback) } else { let req = window.XMLHttpRequest ? new XMLHttpRequest() : // IE7+, Firefox, Chrome, Opera, Safari new ActiveXObject("Microsoft.XMLHTTP") // IE6, IE5 this.xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback) } } static xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback){ req.timeout = timeout req.open(method, endPoint) req.onload = () => { let response = this.parseJSON(req.responseText) callback && callback(response) } if(ontimeout){ req.ontimeout = ontimeout } // Work around bug in IE9 that requires an attached onprogress handler req.onprogress = () => {} req.send(body) } static xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback){ req.timeout = timeout req.open(method, endPoint, true) req.setRequestHeader("Content-Type", accept) req.onerror = () => { callback && callback(null) } req.onreadystatechange = () => { if(req.readyState === this.states.complete && callback){ let response = this.parseJSON(req.responseText) callback(response) } } 
if(ontimeout){ req.ontimeout = ontimeout } req.send(body) } static parseJSON(resp){ return (resp && resp !== "") ? JSON.parse(resp) : null } static serialize(obj, parentKey){ let queryStr = []; for(var key in obj){ if(!obj.hasOwnProperty(key)){ continue } let paramKey = parentKey ? `${parentKey}[${key}]` : key let paramVal = obj[key] if(typeof paramVal === "object"){ queryStr.push(this.serialize(paramVal, paramKey)) } else { queryStr.push(encodeURIComponent(paramKey) + "=" + encodeURIComponent(paramVal)) } } return queryStr.join("&") } static appendParams(url, params){ if(Object.keys(params).length === 0){ return url } let prefix = url.match(/\?/) ? "&" : "?" return `${url}${prefix}${this.serialize(params)}` } } Ajax.states = {complete: 4} export var Presence = { syncState(state, newState, onJoin, onLeave){ let joins = {} let leaves = {} this.map(state, (key, presence) => { if(!newState[key]){ leaves[key] = this.clone(presence) } }) this.map(newState, (key, newPresence) => { let currentPresence = state[key] if(currentPresence){ let newRefs = newPresence.metas.map(m => m.phx_ref) let curRefs = currentPresence.metas.map(m => m.phx_ref) let joinedMetas = newPresence.metas.filter(m => curRefs.indexOf(m.phx_ref) < 0) let leftMetas = currentPresence.metas.filter(m => newRefs.indexOf(m.phx_ref) < 0) if(joinedMetas.length > 0){ joins[key] = newPresence joins[key].metas = joinedMetas } if(leftMetas.length > 0){ leaves[key] = this.clone(currentPresence) leaves[key].metas = leftMetas } } else { joins[key] = newPresence } }) this.syncDiff(state, {joins: joins, leaves: leaves}, onJoin, onLeave) }, syncDiff(state, {joins, leaves}, onJoin, onLeave){ if(!onJoin){ onJoin = function(){} } if(!onLeave){ onLeave = function(){} } this.map(joins, (key, newPresence) => { let currentPresence = state[key] state[key] = newPresence if(currentPresence){ state[key].metas.unshift(...currentPresence.metas) } onJoin(key, currentPresence, newPresence) }) this.map(leaves, (key, leftPresence) => { 
let currentPresence = state[key] if(!currentPresence){ return } let refsToRemove = leftPresence.metas.map(m => m.phx_ref) currentPresence.metas = currentPresence.metas.filter(p => { return refsToRemove.indexOf(p.phx_ref) < 0 }) onLeave(key, currentPresence, leftPresence) if(currentPresence.metas.length === 0){ delete state[key] } }) }, list(presences, chooser){ if(!chooser){ chooser = function(key, pres){ return pres } } return this.map(presences, (key, presence) => { return chooser(key, presence) }) }, // private map(obj, func){ return Object.getOwnPropertyNames(obj).map(key => func(key, obj[key])) }, clone(obj){ return JSON.parse(JSON.stringify(obj)) } } // Creates a timer that accepts a `timerCalc` function to perform // calculated timeout retries, such as exponential backoff. // // ## Examples // // let reconnectTimer = new Timer(() => this.connect(), function(tries){ // return [1000, 5000, 10000][tries - 1] || 10000 // }) // reconnectTimer.scheduleTimeout() // fires after 1000 // reconnectTimer.scheduleTimeout() // fires after 5000 // reconnectTimer.reset() // reconnectTimer.scheduleTimeout() // fires after 1000 // class Timer { constructor(callback, timerCalc){ this.callback = callback this.timerCalc = timerCalc this.timer = null this.tries = 0 } reset(){ this.tries = 0 clearTimeout(this.timer) } // Cancels any previous scheduleTimeout and schedules callback scheduleTimeout(){ clearTimeout(this.timer) this.timer = setTimeout(() => { this.tries = this.tries + 1 this.callback() }, this.timerCalc(this.tries + 1)) } }
Use latest phoenix.js
addon/phoenix.js
Use latest phoenix.js
<ide><path>ddon/phoenix.js <del>// Phoenix Channels JavaScript client <del>// <del>// ## Socket Connection <del>// <del>// A single connection is established to the server and <del>// channels are multiplexed over the connection. <del>// Connect to the server using the `Socket` class: <del>// <del>// let socket = new Socket("/ws", {params: {userToken: "123"}}) <del>// socket.connect() <del>// <del>// The `Socket` constructor takes the mount point of the socket, <del>// the authentication params, as well as options that can be found in <del>// the Socket docs, such as configuring the `LongPoll` transport, and <del>// heartbeat. <del>// <del>// ## Channels <del>// <del>// Channels are isolated, concurrent processes on the server that <del>// subscribe to topics and broker events between the client and server. <del>// To join a channel, you must provide the topic, and channel params for <del>// authorization. Here's an example chat room example where `"new_msg"` <del>// events are listened for, messages are pushed to the server, and <del>// the channel is joined with ok/error/timeout matches: <del>// <del>// let channel = socket.channel("room:123", {token: roomToken}) <del>// channel.on("new_msg", msg => console.log("Got message", msg) ) <del>// $input.onEnter( e => { <del>// channel.push("new_msg", {body: e.target.val}, 10000) <del>// .receive("ok", (msg) => console.log("created message", msg) ) <del>// .receive("error", (reasons) => console.log("create failed", reasons) ) <del>// .receive("timeout", () => console.log("Networking issue...") ) <del>// }) <del>// channel.join() <del>// .receive("ok", ({messages}) => console.log("catching up", messages) ) <del>// .receive("error", ({reason}) => console.log("failed join", reason) ) <del>// .receive("timeout", () => console.log("Networking issue. 
Still waiting...") ) <del>// <del>// <del>// ## Joining <del>// <del>// Creating a channel with `socket.channel(topic, params)`, binds the params to <del>// `channel.params`, which are sent up on `channel.join()`. <del>// Subsequent rejoins will send up the modified params for <del>// updating authorization params, or passing up last_message_id information. <del>// Successful joins receive an "ok" status, while unsuccessful joins <del>// receive "error". <del>// <del>// ## Duplicate Join Subscriptions <del>// <del>// While the client may join any number of topics on any number of channels, <del>// the client may only hold a single subscription for each unique topic at any <del>// given time. When attempting to create a duplicate subscription, <del>// the server will close the existing channel, log a warning, and <del>// spawn a new channel for the topic. The client will have their <del>// `channel.onClose` callbacks fired for the existing channel, and the new <del>// channel join will have its receive hooks processed as normal. <del>// <del>// ## Pushing Messages <del>// <del>// From the previous example, we can see that pushing messages to the server <del>// can be done with `channel.push(eventName, payload)` and we can optionally <del>// receive responses from the push. Additionally, we can use <del>// `receive("timeout", callback)` to abort waiting for our other `receive` hooks <del>// and take action after some period of waiting. The default timeout is 5000ms. 
<del>// <del>// <del>// ## Socket Hooks <del>// <del>// Lifecycle events of the multiplexed connection can be hooked into via <del>// `socket.onError()` and `socket.onClose()` events, ie: <del>// <del>// socket.onError( () => console.log("there was an error with the connection!") ) <del>// socket.onClose( () => console.log("the connection dropped") ) <del>// <del>// <del>// ## Channel Hooks <del>// <del>// For each joined channel, you can bind to `onError` and `onClose` events <del>// to monitor the channel lifecycle, ie: <del>// <del>// channel.onError( () => console.log("there was an error!") ) <del>// channel.onClose( () => console.log("the channel has gone away gracefully") ) <del>// <del>// ### onError hooks <del>// <del>// `onError` hooks are invoked if the socket connection drops, or the channel <del>// crashes on the server. In either case, a channel rejoin is attempted <del>// automatically in an exponential backoff manner. <del>// <del>// ### onClose hooks <del>// <del>// `onClose` hooks are invoked only in two cases. 1) the channel explicitly <del>// closed on the server, or 2). The client explicitly closed, by calling <del>// `channel.leave()` <del>// <del>// <del>// ## Presence <del>// <del>// The `Presence` object provides features for syncing presence information <del>// from the server with the client and handling presences joining and leaving. <del>// <del>// ### Syncing initial state from the server <del>// <del>// `Presence.syncState` is used to sync the list of presences on the server <del>// with the client's state. An optional `onJoin` and `onLeave` callback can <del>// be provided to react to changes in the client's local presences across <del>// disconnects and reconnects with the server. <del>// <del>// `Presence.syncDiff` is used to sync a diff of presence join and leave <del>// events from the server, as they happen. 
Like `syncState`, `syncDiff` <del>// accepts optional `onJoin` and `onLeave` callbacks to react to a user <del>// joining or leaving from a device. <del>// <del>// ### Listing Presences <del>// <del>// `Presence.list` is used to return a list of presence information <del>// based on the local state of metadata. By default, all presence <del>// metadata is returned, but a `listBy` function can be supplied to <del>// allow the client to select which metadata to use for a given presence. <del>// For example, you may have a user online from different devices with a <del>// a metadata status of "online", but they have set themselves to "away" <del>// on another device. In this case, they app may choose to use the "away" <del>// status for what appears on the UI. The example below defines a `listBy` <del>// function which prioritizes the first metadata which was registered for <del>// each user. This could be the first tab they opened, or the first device <del>// they came online from: <del>// <del>// let state = {} <del>// Presence.syncState(state, stateFromServer) <del>// let listBy = (id, {metas: [first, ...rest]}) => { <del>// first.count = rest.length + 1 // count of this user's presences <del>// first.id = id <del>// return first <del>// } <del>// let onlineUsers = Presence.list(state, listBy) <del>// <del>// <del>// ### Example Usage <del>// <del>// // detect if user has joined for the 1st time or from another tab/device <del>// let onJoin = (id, current, newPres) => { <del>// if(!current){ <del>// console.log("user has entered for the first time", newPres) <del>// } else { <del>// console.log("user additional presence", newPres) <del>// } <del>// } <del>// // detect if user has left from all tabs/devices, or is still present <del>// let onLeave = (id, current, leftPres) => { <del>// if(current.metas.length === 0){ <del>// console.log("user has left from all devices", leftPres) <del>// } else { <del>// console.log("user left from a device", leftPres) <del>// } 
<del>// } <del>// let presences = {} // client's initial empty presence state <del>// // receive initial presence data from server, sent after join <del>// myChannel.on("presences", state => { <del>// Presence.syncState(presences, state, onJoin, onLeave) <del>// displayUsers(Presence.list(presences)) <del>// }) <del>// // receive "presence_diff" from server, containing join/leave events <del>// myChannel.on("presence_diff", diff => { <del>// Presence.syncDiff(presences, diff, onJoin, onLeave) <del>// this.setState({users: Presence.list(room.presences, listBy)}) <del>// }) <del>// <del>const VSN = "1.0.0" <add>/** <add> * Phoenix Channels JavaScript client <add> * <add> * ## Socket Connection <add> * <add> * A single connection is established to the server and <add> * channels are multiplexed over the connection. <add> * Connect to the server using the `Socket` class: <add> * <add> * ```javascript <add> * let socket = new Socket("/socket", {params: {userToken: "123"}}) <add> * socket.connect() <add> * ``` <add> * <add> * The `Socket` constructor takes the mount point of the socket, <add> * the authentication params, as well as options that can be found in <add> * the Socket docs, such as configuring the `LongPoll` transport, and <add> * heartbeat. <add> * <add> * ## Channels <add> * <add> * Channels are isolated, concurrent processes on the server that <add> * subscribe to topics and broker events between the client and server. <add> * To join a channel, you must provide the topic, and channel params for <add> * authorization. 
Here's an example chat room example where `"new_msg"` <add> * events are listened for, messages are pushed to the server, and <add> * the channel is joined with ok/error/timeout matches: <add> * <add> * ```javascript <add> * let channel = socket.channel("room:123", {token: roomToken}) <add> * channel.on("new_msg", msg => console.log("Got message", msg) ) <add> * $input.onEnter( e => { <add> * channel.push("new_msg", {body: e.target.val}, 10000) <add> * .receive("ok", (msg) => console.log("created message", msg) ) <add> * .receive("error", (reasons) => console.log("create failed", reasons) ) <add> * .receive("timeout", () => console.log("Networking issue...") ) <add> * }) <add> * <add> * channel.join() <add> * .receive("ok", ({messages}) => console.log("catching up", messages) ) <add> * .receive("error", ({reason}) => console.log("failed join", reason) ) <add> * .receive("timeout", () => console.log("Networking issue. Still waiting...")) <add> *``` <add> * <add> * ## Joining <add> * <add> * Creating a channel with `socket.channel(topic, params)`, binds the params to <add> * `channel.params`, which are sent up on `channel.join()`. <add> * Subsequent rejoins will send up the modified params for <add> * updating authorization params, or passing up last_message_id information. <add> * Successful joins receive an "ok" status, while unsuccessful joins <add> * receive "error". <add> * <add> * ## Duplicate Join Subscriptions <add> * <add> * While the client may join any number of topics on any number of channels, <add> * the client may only hold a single subscription for each unique topic at any <add> * given time. When attempting to create a duplicate subscription, <add> * the server will close the existing channel, log a warning, and <add> * spawn a new channel for the topic. The client will have their <add> * `channel.onClose` callbacks fired for the existing channel, and the new <add> * channel join will have its receive hooks processed as normal. 
<add> * <add> * ## Pushing Messages <add> * <add> * From the previous example, we can see that pushing messages to the server <add> * can be done with `channel.push(eventName, payload)` and we can optionally <add> * receive responses from the push. Additionally, we can use <add> * `receive("timeout", callback)` to abort waiting for our other `receive` hooks <add> * and take action after some period of waiting. The default timeout is 10000ms. <add> * <add> * <add> * ## Socket Hooks <add> * <add> * Lifecycle events of the multiplexed connection can be hooked into via <add> * `socket.onError()` and `socket.onClose()` events, ie: <add> * <add> * ```javascript <add> * socket.onError( () => console.log("there was an error with the connection!") ) <add> * socket.onClose( () => console.log("the connection dropped") ) <add> * ``` <add> * <add> * <add> * ## Channel Hooks <add> * <add> * For each joined channel, you can bind to `onError` and `onClose` events <add> * to monitor the channel lifecycle, ie: <add> * <add> * ```javascript <add> * channel.onError( () => console.log("there was an error!") ) <add> * channel.onClose( () => console.log("the channel has gone away gracefully") ) <add> * ``` <add> * <add> * ### onError hooks <add> * <add> * `onError` hooks are invoked if the socket connection drops, or the channel <add> * crashes on the server. In either case, a channel rejoin is attempted <add> * automatically in an exponential backoff manner. <add> * <add> * ### onClose hooks <add> * <add> * `onClose` hooks are invoked only in two cases. 1) the channel explicitly <add> * closed on the server, or 2). The client explicitly closed, by calling <add> * `channel.leave()` <add> * <add> * <add> * ## Presence <add> * <add> * The `Presence` object provides features for syncing presence information <add> * from the server with the client and handling presences joining and leaving. 
<add> * <add> * ### Syncing state from the server <add> * <add> * To sync presence state from the server, first instantiate an object and <add> * pass your channel in to track lifecycle events: <add> * <add> * ```javascript <add> * let channel = socket.channel("some:topic") <add> * let presence = new Presence(channel) <add> * ``` <add> * <add> * Next, use the `presence.onSync` callback to react to state changes <add> * from the server. For example, to render the list of users every time <add> * the list changes, you could write: <add> * <add> * ```javascript <add> * presence.onSync(() => { <add> * myRenderUsersFunction(presence.list()) <add> * }) <add> * ``` <add> * <add> * ### Listing Presences <add> * <add> * `presence.list` is used to return a list of presence information <add> * based on the local state of metadata. By default, all presence <add> * metadata is returned, but a `listBy` function can be supplied to <add> * allow the client to select which metadata to use for a given presence. <add> * For example, you may have a user online from different devices with <add> * a metadata status of "online", but they have set themselves to "away" <add> * on another device. In this case, the app may choose to use the "away" <add> * status for what appears on the UI. The example below defines a `listBy` <add> * function which prioritizes the first metadata which was registered for <add> * each user. 
This could be the first tab they opened, or the first device <add> * they came online from: <add> * <add> * ```javascript <add> * let listBy = (id, {metas: [first, ...rest]}) => { <add> * first.count = rest.length + 1 // count of this user's presences <add> * first.id = id <add> * return first <add> * } <add> * let onlineUsers = presence.list(listBy) <add> * ``` <add> * <add> * ### Handling individual presence join and leave events <add> * <add> * The `presence.onJoin` and `presence.onLeave` callbacks can be used to <add> * react to individual presences joining and leaving the app. For example: <add> * <add> * ```javascript <add> * let presence = new Presence(channel) <add> * <add> * // detect if user has joined for the 1st time or from another tab/device <add> * presence.onJoin((id, current, newPres) => { <add> * if(!current){ <add> * console.log("user has entered for the first time", newPres) <add> * } else { <add> * console.log("user additional presence", newPres) <add> * } <add> * }) <add> * <add> * // detect if user has left from all tabs/devices, or is still present <add> * presence.onLeave((id, current, leftPres) => { <add> * if(current.metas.length === 0){ <add> * console.log("user has left from all devices", leftPres) <add> * } else { <add> * console.log("user left from a device", leftPres) <add> * } <add> * }) <add> * // receive presence data from server <add> * presence.onSync(() => { <add> * displayUsers(presence.list()) <add> * }) <add> * ``` <add> * @module phoenix <add> */ <add> <add>const globalSelf = typeof self !== "undefined" ? self : null <add>const phxWindow = typeof window !== "undefined" ? 
window : null <add>const global = globalSelf || phxWindow || this <add>const DEFAULT_VSN = "2.0.0" <ide> const SOCKET_STATES = {connecting: 0, open: 1, closing: 2, closed: 3} <ide> const DEFAULT_TIMEOUT = 10000 <add>const WS_CLOSE_NORMAL = 1000 <ide> const CHANNEL_STATES = { <ide> closed: "closed", <ide> errored: "errored", <ide> reply: "phx_reply", <ide> leave: "phx_leave" <ide> } <add>const CHANNEL_LIFECYCLE_EVENTS = [ <add> CHANNEL_EVENTS.close, <add> CHANNEL_EVENTS.error, <add> CHANNEL_EVENTS.join, <add> CHANNEL_EVENTS.reply, <add> CHANNEL_EVENTS.leave <add>] <ide> const TRANSPORTS = { <ide> longpoll: "longpoll", <ide> websocket: "websocket" <ide> } <ide> <add>// wraps value in closure or returns closure <add>let closure = (value) => { <add> if(typeof value === "function"){ <add> return value <add> } else { <add> let closure = function(){ return value } <add> return closure <add> } <add>} <add> <add>/** <add> * Initializes the Push <add> * @param {Channel} channel - The Channel <add> * @param {string} event - The event, for example `"phx_join"` <add> * @param {Object} payload - The payload, for example `{user_id: 123}` <add> * @param {number} timeout - The push timeout in milliseconds <add> */ <ide> class Push { <del> <del> // Initializes the Push <del> // <del> // channel - The Channel <del> // event - The event, for example `"phx_join"` <del> // payload - The payload, for example `{user_id: 123}` <del> // timeout - The push timeout in milliseconds <del> // <ide> constructor(channel, event, payload, timeout){ <ide> this.channel = channel <ide> this.event = event <del> this.payload = payload || {} <add> this.payload = payload || function(){ return {} } <ide> this.receivedResp = null <ide> this.timeout = timeout <ide> this.timeoutTimer = null <ide> this.sent = false <ide> } <ide> <add> /** <add> * <add> * @param {number} timeout <add> */ <ide> resend(timeout){ <ide> this.timeout = timeout <del> this.cancelRefEvent() <del> this.ref = null <del> this.refEvent = 
null <del> this.receivedResp = null <del> this.sent = false <add> this.reset() <ide> this.send() <ide> } <ide> <add> /** <add> * <add> */ <ide> send(){ if(this.hasReceived("timeout")){ return } <ide> this.startTimeout() <ide> this.sent = true <ide> this.channel.socket.push({ <ide> topic: this.channel.topic, <ide> event: this.event, <del> payload: this.payload, <del> ref: this.ref <del> }) <del> } <del> <add> payload: this.payload(), <add> ref: this.ref, <add> join_ref: this.channel.joinRef() <add> }) <add> } <add> <add> /** <add> * <add> * @param {*} status <add> * @param {*} callback <add> */ <ide> receive(status, callback){ <ide> if(this.hasReceived(status)){ <ide> callback(this.receivedResp.response) <ide> return this <ide> } <ide> <del> <del> // private <del> <add> /** <add> * @private <add> */ <add> reset(){ <add> this.cancelRefEvent() <add> this.ref = null <add> this.refEvent = null <add> this.receivedResp = null <add> this.sent = false <add> } <add> <add> /** <add> * @private <add> */ <ide> matchReceive({status, response, ref}){ <ide> this.recHooks.filter( h => h.status === status ) <del> .forEach( h => h.callback(response) ) <del> } <del> <add> .forEach( h => h.callback(response) ) <add> } <add> <add> /** <add> * @private <add> */ <ide> cancelRefEvent(){ if(!this.refEvent){ return } <ide> this.channel.off(this.refEvent) <ide> } <ide> <add> /** <add> * @private <add> */ <ide> cancelTimeout(){ <ide> clearTimeout(this.timeoutTimer) <ide> this.timeoutTimer = null <ide> } <ide> <del> startTimeout(){ if(this.timeoutTimer){ return } <add> /** <add> * @private <add> */ <add> startTimeout(){ if(this.timeoutTimer){ this.cancelTimeout() } <ide> this.ref = this.channel.socket.makeRef() <ide> this.refEvent = this.channel.replyEventName(this.ref) <ide> <ide> }, this.timeout) <ide> } <ide> <add> /** <add> * @private <add> */ <ide> hasReceived(status){ <ide> return this.receivedResp && this.receivedResp.status === status <ide> } <ide> <add> /** <add> * @private <add> */ 
<ide> trigger(status, response){ <ide> this.channel.trigger(this.refEvent, {status, response}) <ide> } <ide> } <ide> <add>/** <add> * <add> * @param {string} topic <add> * @param {(Object|function)} params <add> * @param {Socket} socket <add> */ <ide> export class Channel { <ide> constructor(topic, params, socket) { <ide> this.state = CHANNEL_STATES.closed <ide> this.topic = topic <del> this.params = params || {} <add> this.params = closure(params || {}) <ide> this.socket = socket <ide> this.bindings = [] <add> this.bindingRef = 0 <ide> this.timeout = this.socket.timeout <ide> this.joinedOnce = false <ide> this.joinPush = new Push(this, CHANNEL_EVENTS.join, this.params, this.timeout) <ide> this.pushBuffer = [] <del> this.rejoinTimer = new Timer( <del> () => this.rejoinUntilConnected(), <del> this.socket.reconnectAfterMs <del> ) <add> <add> this.rejoinTimer = new Timer(() => { <add> if(this.socket.isConnected()){ this.rejoin() } <add> }, this.socket.rejoinAfterMs) <add> this.socket.onError(() => this.rejoinTimer.reset()) <add> this.socket.onOpen(() => { <add> this.rejoinTimer.reset() <add> if(this.isErrored()){ this.rejoin() } <add> }) <ide> this.joinPush.receive("ok", () => { <ide> this.state = CHANNEL_STATES.joined <ide> this.rejoinTimer.reset() <ide> this.pushBuffer.forEach( pushEvent => pushEvent.send() ) <ide> this.pushBuffer = [] <ide> }) <del> this.onClose( () => { <del> this.socket.log("channel", `close ${this.topic} ${this.joinRef()}`) <add> this.joinPush.receive("error", () => { <add> this.state = CHANNEL_STATES.errored <add> if(this.socket.isConnected()){ this.rejoinTimer.scheduleTimeout() } <add> }) <add> this.onClose(() => { <add> this.rejoinTimer.reset() <add> if(this.socket.hasLogger()) this.socket.log("channel", `close ${this.topic} ${this.joinRef()}`) <ide> this.state = CHANNEL_STATES.closed <ide> this.socket.remove(this) <ide> }) <del> this.onError( reason => { <del> this.socket.log("channel", `error ${this.topic}`, reason) <add> 
this.onError(reason => { <add> if(this.socket.hasLogger()) this.socket.log("channel", `error ${this.topic}`, reason) <add> if(this.isJoining()){ this.joinPush.reset() } <ide> this.state = CHANNEL_STATES.errored <del> this.rejoinTimer.scheduleTimeout() <add> if(this.socket.isConnected()){ this.rejoinTimer.scheduleTimeout() } <ide> }) <ide> this.joinPush.receive("timeout", () => { <del> if(this.state !== CHANNEL_STATES.joining){ return } <del> <del> this.socket.log("channel", `timeout ${this.topic}`, this.joinPush.timeout) <add> if(this.socket.hasLogger()) this.socket.log("channel", `timeout ${this.topic} (${this.joinRef()})`, this.joinPush.timeout) <add> let leavePush = new Push(this, CHANNEL_EVENTS.leave, closure({}), this.timeout) <add> leavePush.send() <ide> this.state = CHANNEL_STATES.errored <del> this.rejoinTimer.scheduleTimeout() <add> this.joinPush.reset() <add> if(this.socket.isConnected()){ this.rejoinTimer.scheduleTimeout() } <ide> }) <ide> this.on(CHANNEL_EVENTS.reply, (payload, ref) => { <ide> this.trigger(this.replyEventName(ref), payload) <ide> }) <ide> } <ide> <del> rejoinUntilConnected(){ <del> this.rejoinTimer.scheduleTimeout() <del> if(this.socket.isConnected()){ <del> this.rejoin() <del> } <del> } <del> <add> /** <add> * Join the channel <add> * @param {integer} timeout <add> * @returns {Push} <add> */ <ide> join(timeout = this.timeout){ <ide> if(this.joinedOnce){ <del> throw(`tried to join multiple times. 'join' can only be called a single time per channel instance`) <add> throw new Error(`tried to join multiple times. 
'join' can only be called a single time per channel instance`) <ide> } else { <add> this.timeout = timeout <ide> this.joinedOnce = true <del> this.rejoin(timeout) <add> this.rejoin() <ide> return this.joinPush <ide> } <ide> } <ide> <del> onClose(callback){ this.on(CHANNEL_EVENTS.close, callback) } <del> <add> /** <add> * Hook into channel close <add> * @param {Function} callback <add> */ <add> onClose(callback){ <add> this.on(CHANNEL_EVENTS.close, callback) <add> } <add> <add> /** <add> * Hook into channel errors <add> * @param {Function} callback <add> */ <ide> onError(callback){ <del> this.on(CHANNEL_EVENTS.error, reason => callback(reason) ) <del> } <del> <del> on(event, callback){ this.bindings.push({event, callback}) } <del> <del> off(event){ this.bindings = this.bindings.filter( bind => bind.event !== event ) } <del> <del> canPush(){ return this.socket.isConnected() && this.state === CHANNEL_STATES.joined } <del> <add> return this.on(CHANNEL_EVENTS.error, reason => callback(reason)) <add> } <add> <add> /** <add> * Subscribes on channel events <add> * <add> * Subscription returns a ref counter, which can be used later to <add> * unsubscribe the exact event listener <add> * <add> * @example <add> * const ref1 = channel.on("event", do_stuff) <add> * const ref2 = channel.on("event", do_other_stuff) <add> * channel.off("event", ref1) <add> * // Since unsubscription, do_stuff won't fire, <add> * // while do_other_stuff will keep firing on the "event" <add> * <add> * @param {string} event <add> * @param {Function} callback <add> * @returns {integer} ref <add> */ <add> on(event, callback){ <add> let ref = this.bindingRef++ <add> this.bindings.push({event, ref, callback}) <add> return ref <add> } <add> <add> /** <add> * @param {string} event <add> * @param {integer} ref <add> */ <add> off(event, ref){ <add> this.bindings = this.bindings.filter((bind) => { <add> return !(bind.event === event && (typeof ref === "undefined" || ref === bind.ref)) <add> }) <add> } <add> 
<add> /** <add> * @private <add> */ <add> canPush(){ return this.socket.isConnected() && this.isJoined() } <add> <add> /** <add> * @param {string} event <add> * @param {Object} payload <add> * @param {number} [timeout] <add> * @returns {Push} <add> */ <ide> push(event, payload, timeout = this.timeout){ <ide> if(!this.joinedOnce){ <del> throw(`tried to push '${event}' to '${this.topic}' before joining. Use channel.join() before pushing events`) <del> } <del> let pushEvent = new Push(this, event, payload, timeout) <add> throw new Error(`tried to push '${event}' to '${this.topic}' before joining. Use channel.join() before pushing events`) <add> } <add> let pushEvent = new Push(this, event, function(){ return payload }, timeout) <ide> if(this.canPush()){ <ide> pushEvent.send() <ide> } else { <ide> return pushEvent <ide> } <ide> <del> // Leaves the channel <del> // <del> // Unsubscribes from server events, and <del> // instructs channel to terminate on server <del> // <del> // Triggers onClose() hooks <del> // <del> // To receive leave acknowledgements, use the a `receive` <del> // hook to bind to the server ack, ie: <del> // <del> // channel.leave().receive("ok", () => alert("left!") ) <del> // <add> /** Leaves the channel <add> * <add> * Unsubscribes from server events, and <add> * instructs channel to terminate on server <add> * <add> * Triggers onClose() hooks <add> * <add> * To receive leave acknowledgements, use the a `receive` <add> * hook to bind to the server ack, ie: <add> * <add> * @example <add> * channel.leave().receive("ok", () => alert("left!") ) <add> * <add> * @param {integer} timeout <add> * @returns {Push} <add> */ <ide> leave(timeout = this.timeout){ <add> this.rejoinTimer.reset() <add> this.joinPush.cancelTimeout() <add> <ide> this.state = CHANNEL_STATES.leaving <ide> let onClose = () => { <del> this.socket.log("channel", `leave ${this.topic}`) <del> this.trigger(CHANNEL_EVENTS.close, "leave", this.joinRef()) <del> } <del> let leavePush = new 
Push(this, CHANNEL_EVENTS.leave, {}, timeout) <add> if(this.socket.hasLogger()) this.socket.log("channel", `leave ${this.topic}`) <add> this.trigger(CHANNEL_EVENTS.close, "leave") <add> } <add> let leavePush = new Push(this, CHANNEL_EVENTS.leave, closure({}), timeout) <ide> leavePush.receive("ok", () => onClose() ) <del> .receive("timeout", () => onClose() ) <add> .receive("timeout", () => onClose() ) <ide> leavePush.send() <ide> if(!this.canPush()){ leavePush.trigger("ok", {}) } <ide> <ide> return leavePush <ide> } <ide> <del> // Overridable message hook <del> // <del> // Receives all events for specialized message handling <del> onMessage(event, payload, ref){} <del> <del> // private <del> <del> isMember(topic){ return this.topic === topic } <del> <add> /** <add> * Overridable message hook <add> * <add> * Receives all events for specialized message handling <add> * before dispatching to the channel callbacks. <add> * <add> * Must return the payload, modified or unmodified <add> * @param {string} event <add> * @param {Object} payload <add> * @param {integer} ref <add> * @returns {Object} <add> */ <add> onMessage(event, payload, ref){ return payload } <add> <add> /** <add> * @private <add> */ <add> isLifecycleEvent(event) { return CHANNEL_LIFECYCLE_EVENTS.indexOf(event) >= 0 } <add> <add> /** <add> * @private <add> */ <add> isMember(topic, event, payload, joinRef){ <add> if(this.topic !== topic){ return false } <add> <add> if(joinRef && joinRef !== this.joinRef() && this.isLifecycleEvent(event)){ <add> if (this.socket.hasLogger()) this.socket.log("channel", "dropping outdated message", {topic, event, payload, joinRef}) <add> return false <add> } else { <add> return true <add> } <add> } <add> <add> /** <add> * @private <add> */ <ide> joinRef(){ return this.joinPush.ref } <ide> <add> /** <add> * @private <add> */ <ide> sendJoin(timeout){ <ide> this.state = CHANNEL_STATES.joining <ide> this.joinPush.resend(timeout) <ide> } <ide> <del> rejoin(timeout = this.timeout){ 
if(this.state === CHANNEL_STATES.leaving){ return } <add> /** <add> * @private <add> */ <add> rejoin(timeout = this.timeout){ if(this.isLeaving()){ return } <ide> this.sendJoin(timeout) <ide> } <ide> <del> trigger(event, payload, ref){ <del> let {close, error, leave, join} = CHANNEL_EVENTS <del> if(ref && [close, error, leave, join].indexOf(event) >= 0 && ref !== this.joinRef()){ <del> return <del> } <del> this.onMessage(event, payload, ref) <del> this.bindings.filter( bind => bind.event === event) <del> .map( bind => bind.callback(payload, ref)) <del> } <del> <add> /** <add> * @private <add> */ <add> trigger(event, payload, ref, joinRef){ <add> let handledPayload = this.onMessage(event, payload, ref, joinRef) <add> if(payload && !handledPayload){ throw new Error("channel onMessage callbacks must return the payload, modified or unmodified") } <add> <add> for (let i = 0; i < this.bindings.length; i++) { <add> const bind = this.bindings[i] <add> if(bind.event !== event){ continue } <add> bind.callback(handledPayload, ref, joinRef || this.joinRef()) <add> } <add> } <add> <add> /** <add> * @private <add> */ <ide> replyEventName(ref){ return `chan_reply_${ref}` } <add> <add> /** <add> * @private <add> */ <add> isClosed() { return this.state === CHANNEL_STATES.closed } <add> <add> /** <add> * @private <add> */ <add> isErrored(){ return this.state === CHANNEL_STATES.errored } <add> <add> /** <add> * @private <add> */ <add> isJoined() { return this.state === CHANNEL_STATES.joined } <add> <add> /** <add> * @private <add> */ <add> isJoining(){ return this.state === CHANNEL_STATES.joining } <add> <add> /** <add> * @private <add> */ <add> isLeaving(){ return this.state === CHANNEL_STATES.leaving } <ide> } <ide> <add>/* The default serializer for encoding and decoding messages */ <add>export let Serializer = { <add> encode(msg, callback){ <add> let payload = [ <add> msg.join_ref, msg.ref, msg.topic, msg.event, msg.payload <add> ] <add> return callback(JSON.stringify(payload)) 
<add> }, <add> <add> decode(rawPayload, callback){ <add> let [join_ref, ref, topic, event, payload] = JSON.parse(rawPayload) <add> <add> return callback({join_ref, ref, topic, event, payload}) <add> } <add>} <add> <add> <add>/** Initializes the Socket <add> * <add> * <add> * For IE8 support use an ES5-shim (https://github.com/es-shims/es5-shim) <add> * <add> * @param {string} endPoint - The string WebSocket endpoint, ie, `"ws://example.com/socket"`, <add> * `"wss://example.com"` <add> * `"/socket"` (inherited host & protocol) <add> * @param {Object} [opts] - Optional configuration <add> * @param {string} [opts.transport] - The Websocket Transport, for example WebSocket or Phoenix.LongPoll. <add> * <add> * Defaults to WebSocket with automatic LongPoll fallback. <add> * @param {Function} [opts.encode] - The function to encode outgoing messages. <add> * <add> * Defaults to JSON encoder. <add> * <add> * @param {Function} [opts.decode] - The function to decode incoming messages. <add> * <add> * Defaults to JSON: <add> * <add> * ```javascript <add> * (payload, callback) => callback(JSON.parse(payload)) <add> * ``` <add> * <add> * @param {number} [opts.timeout] - The default timeout in milliseconds to trigger push timeouts. <add> * <add> * Defaults `DEFAULT_TIMEOUT` <add> * @param {number} [opts.heartbeatIntervalMs] - The millisec interval to send a heartbeat message <add> * @param {number} [opts.reconnectAfterMs] - The optional function that returns the millsec <add> * socket reconnect interval. <add> * <add> * Defaults to stepped backoff of: <add> * <add> * ```javascript <add> * function(tries){ <add> * return [10, 50, 100, 150, 200, 250, 500, 1000, 2000][tries - 1] || 5000 <add> * } <add> * ```` <add> * <add> * @param {number} [opts.rejoinAfterMs] - The optional function that returns the millsec <add> * rejoin interval for individual channels. 
<add> * <add> * ```javascript <add> * function(tries){ <add> * return [1000, 2000, 5000][tries - 1] || 10000 <add> * } <add> * ```` <add> * <add> * @param {Function} [opts.logger] - The optional function for specialized logging, ie: <add> * <add> * ```javascript <add> * function(kind, msg, data) { <add> * console.log(`${kind}: ${msg}`, data) <add> * } <add> * ``` <add> * <add> * @param {number} [opts.longpollerTimeout] - The maximum timeout of a long poll AJAX request. <add> * <add> * Defaults to 20s (double the server long poll timer). <add> * <add> * @param {{Object|function)} [opts.params] - The optional params to pass when connecting <add> * @param {string} [opts.binaryType] - The binary type to use for binary WebSocket frames. <add> * <add> * Defaults to "arraybuffer" <add> * <add> * @param {vsn} [opts.vsn] - The serializer's protocol version to send on connect. <add> * <add> * Defaults to DEFAULT_VSN. <add>*/ <ide> export class Socket { <del> <del> // Initializes the Socket <del> // <del> // endPoint - The string WebSocket endpoint, ie, "ws://example.com/ws", <del> // "wss://example.com" <del> // "/ws" (inherited host & protocol) <del> // opts - Optional configuration <del> // transport - The Websocket Transport, for example WebSocket or Phoenix.LongPoll. <del> // Defaults to WebSocket with automatic LongPoll fallback. <del> // timeout - The default timeout in milliseconds to trigger push timeouts. <del> // Defaults `DEFAULT_TIMEOUT` <del> // heartbeatIntervalMs - The millisec interval to send a heartbeat message <del> // reconnectAfterMs - The optional function that returns the millsec <del> // reconnect interval. 
Defaults to stepped backoff of: <del> // <del> // function(tries){ <del> // return [1000, 5000, 10000][tries - 1] || 10000 <del> // } <del> // <del> // logger - The optional function for specialized logging, ie: <del> // `logger: (kind, msg, data) => { console.log(`${kind}: ${msg}`, data) } <del> // <del> // longpollerTimeout - The maximum timeout of a long poll AJAX request. <del> // Defaults to 20s (double the server long poll timer). <del> // <del> // params - The optional params to pass when connecting <del> // <del> // For IE8 support use an ES5-shim (https://github.com/es-shims/es5-shim) <del> // <ide> constructor(endPoint, opts = {}){ <ide> this.stateChangeCallbacks = {open: [], close: [], error: [], message: []} <ide> this.channels = [] <ide> this.sendBuffer = [] <ide> this.ref = 0 <ide> this.timeout = opts.timeout || DEFAULT_TIMEOUT <del> this.transport = opts.transport || window.WebSocket || LongPoll <del> this.heartbeatIntervalMs = opts.heartbeatIntervalMs || 30000 <del> this.reconnectAfterMs = opts.reconnectAfterMs || function(tries){ <del> return [1000, 2000, 5000, 10000][tries - 1] || 10000 <add> this.transport = opts.transport || global.WebSocket || LongPoll <add> this.defaultEncoder = Serializer.encode <add> this.defaultDecoder = Serializer.decode <add> this.closeWasClean = false <add> this.unloaded = false <add> this.binaryType = opts.binaryType || "arraybuffer" <add> if(this.transport !== LongPoll){ <add> this.encode = opts.encode || this.defaultEncoder <add> this.decode = opts.decode || this.defaultDecoder <add> } else { <add> this.encode = this.defaultEncoder <add> this.decode = this.defaultDecoder <add> } <add> if(phxWindow && phxWindow.addEventListener){ <add> phxWindow.addEventListener("beforeunload", e => { <add> if(this.conn){ <add> this.unloaded = true <add> this.abnormalClose("unloaded") <add> } <add> }) <add> } <add> this.heartbeatIntervalMs = opts.heartbeatIntervalMs || 30000 <add> this.rejoinAfterMs = (tries) => { <add> 
if(opts.rejoinAfterMs){ <add> return opts.rejoinAfterMs(tries) <add> } else { <add> return [1000, 2000, 5000][tries - 1] || 10000 <ide> } <del> this.logger = opts.logger || function(){} // noop <add> } <add> this.reconnectAfterMs = (tries) => { <add> if(this.unloaded){ return 100 } <add> if(opts.reconnectAfterMs){ <add> return opts.reconnectAfterMs(tries) <add> } else { <add> return [10, 50, 100, 150, 200, 250, 500, 1000, 2000][tries - 1] || 5000 <add> } <add> } <add> this.logger = opts.logger || null <ide> this.longpollerTimeout = opts.longpollerTimeout || 20000 <del> this.params = opts.params || {} <add> this.params = closure(opts.params || {}) <ide> this.endPoint = `${endPoint}/${TRANSPORTS.websocket}` <add> this.vsn = opts.vsn || DEFAULT_VSN <add> this.heartbeatTimer = null <add> this.pendingHeartbeatRef = null <ide> this.reconnectTimer = new Timer(() => { <del> this.disconnect(() => this.connect()) <add> this.teardown(() => this.connect()) <ide> }, this.reconnectAfterMs) <ide> } <ide> <add> /** <add> * Returns the socket protocol <add> * <add> * @returns {string} <add> */ <ide> protocol(){ return location.protocol.match(/^https/) ? "wss" : "ws" } <ide> <add> /** <add> * The fully qualifed socket url <add> * <add> * @returns {string} <add> */ <ide> endPointURL(){ <ide> let uri = Ajax.appendParams( <del> Ajax.appendParams(this.endPoint, this.params), {vsn: VSN}) <add> Ajax.appendParams(this.endPoint, this.params()), {vsn: this.vsn}) <ide> if(uri.charAt(0) !== "/"){ return uri } <ide> if(uri.charAt(1) === "/"){ return `${this.protocol()}:${uri}` } <ide> <ide> return `${this.protocol()}://${location.host}${uri}` <ide> } <ide> <add> /** <add> * Disconnects the socket <add> * <add> * See https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent#Status_codes for valid status codes. <add> * <add> * @param {Function} callback - Optional callback which is called after socket is disconnected. <add> * @param {integer} code - A status code for disconnection (Optional). 
<add> * @param {string} reason - A textual description of the reason to disconnect. (Optional) <add> */ <ide> disconnect(callback, code, reason){ <add> this.closeWasClean = true <add> this.reconnectTimer.reset() <add> this.teardown(callback, code, reason) <add> } <add> <add> /** <add> * <add> * @param {Object} params - The params to send when connecting, for example `{user_id: userToken}` <add> * <add> * Passing params to connect is deprecated; pass them in the Socket constructor instead: <add> * `new Socket("/socket", {params: {user_id: userToken}})`. <add> */ <add> connect(params){ <add> if(params){ <add> console && console.log("passing params to connect is deprecated. Instead pass :params to the Socket constructor") <add> this.params = closure(params) <add> } <add> if(this.conn){ return } <add> this.closeWasClean = false <add> this.conn = new this.transport(this.endPointURL()) <add> this.conn.binaryType = this.binaryType <add> this.conn.timeout = this.longpollerTimeout <add> this.conn.onopen = () => this.onConnOpen() <add> this.conn.onerror = error => this.onConnError(error) <add> this.conn.onmessage = event => this.onConnMessage(event) <add> this.conn.onclose = event => this.onConnClose(event) <add> } <add> <add> /** <add> * Logs the message. Override `this.logger` for specialized logging. noops by default <add> * @param {string} kind <add> * @param {string} msg <add> * @param {Object} data <add> */ <add> log(kind, msg, data){ this.logger(kind, msg, data) } <add> <add> /** <add> * Returns true if a logger has been set on this socket. 
<add> */ <add> hasLogger(){ return this.logger !== null } <add> <add> /** <add> * Registers callbacks for connection open events <add> * <add> * @example socket.onOpen(function(){ console.info("the socket was opened") }) <add> * <add> * @param {Function} callback <add> */ <add> onOpen(callback){ this.stateChangeCallbacks.open.push(callback) } <add> <add> /** <add> * Registers callbacks for connection close events <add> * @param {Function} callback <add> */ <add> onClose(callback){ this.stateChangeCallbacks.close.push(callback) } <add> <add> /** <add> * Registers callbacks for connection error events <add> * <add> * @example socket.onError(function(error){ alert("An error occurred") }) <add> * <add> * @param {Function} callback <add> */ <add> onError(callback){ this.stateChangeCallbacks.error.push(callback) } <add> <add> /** <add> * Registers callbacks for connection message events <add> * @param {Function} callback <add> */ <add> onMessage(callback){ this.stateChangeCallbacks.message.push(callback) } <add> <add> /** <add> * @private <add> */ <add> onConnOpen(){ <add> if (this.hasLogger()) this.log("transport", `connected to ${this.endPointURL()}`) <add> this.unloaded = false <add> this.closeWasClean = false <add> this.flushSendBuffer() <add> this.reconnectTimer.reset() <add> this.resetHeartbeat() <add> this.stateChangeCallbacks.open.forEach( callback => callback() ) <add> } <add> <add> /** <add> * @private <add> */ <add> <add> resetHeartbeat(){ if(this.conn && this.conn.skipHeartbeat){ return } <add> this.pendingHeartbeatRef = null <add> clearInterval(this.heartbeatTimer) <add> this.heartbeatTimer = setInterval(() => this.sendHeartbeat(), this.heartbeatIntervalMs) <add> } <add> <add> teardown(callback, code, reason){ <ide> if(this.conn){ <ide> this.conn.onclose = function(){} // noop <ide> if(code){ this.conn.close(code, reason || "") } else { this.conn.close() } <ide> callback && callback() <ide> } <ide> <del> // params - The params to send when connecting, for 
example `{user_id: userToken}` <del> connect(params){ <del> if(params){ <del> console && console.log("passing params to connect is deprecated. Instead pass :params to the Socket constructor") <del> this.params = params <del> } <del> if(this.conn){ return } <del> <del> this.conn = new this.transport(this.endPointURL()) <del> this.conn.timeout = this.longpollerTimeout <del> this.conn.onopen = () => this.onConnOpen() <del> this.conn.onerror = error => this.onConnError(error) <del> this.conn.onmessage = event => this.onConnMessage(event) <del> this.conn.onclose = event => this.onConnClose(event) <del> } <del> <del> // Logs the message. Override `this.logger` for specialized logging. noops by default <del> log(kind, msg, data){ this.logger(kind, msg, data) } <del> <del> // Registers callbacks for connection state change events <del> // <del> // Examples <del> // <del> // socket.onError(function(error){ alert("An error occurred") }) <del> // <del> onOpen (callback){ this.stateChangeCallbacks.open.push(callback) } <del> onClose (callback){ this.stateChangeCallbacks.close.push(callback) } <del> onError (callback){ this.stateChangeCallbacks.error.push(callback) } <del> onMessage (callback){ this.stateChangeCallbacks.message.push(callback) } <del> <del> onConnOpen(){ <del> this.log("transport", `connected to ${this.endPointURL()}`, this.transport.prototype) <del> this.flushSendBuffer() <del> this.reconnectTimer.reset() <del> if(!this.conn.skipHeartbeat){ <del> clearInterval(this.heartbeatTimer) <del> this.heartbeatTimer = setInterval(() => this.sendHeartbeat(), this.heartbeatIntervalMs) <del> } <del> this.stateChangeCallbacks.open.forEach( callback => callback() ) <del> } <del> <ide> onConnClose(event){ <del> this.log("transport", "close", event) <add> if (this.hasLogger()) this.log("transport", "close", event) <ide> this.triggerChanError() <ide> clearInterval(this.heartbeatTimer) <del> this.reconnectTimer.scheduleTimeout() <add> if(!this.closeWasClean){ <add> 
this.reconnectTimer.scheduleTimeout() <add> } <ide> this.stateChangeCallbacks.close.forEach( callback => callback(event) ) <ide> } <ide> <add> /** <add> * @private <add> */ <ide> onConnError(error){ <del> this.log("transport", error) <add> if (this.hasLogger()) this.log("transport", error) <ide> this.triggerChanError() <ide> this.stateChangeCallbacks.error.forEach( callback => callback(error) ) <ide> } <ide> <add> /** <add> * @private <add> */ <ide> triggerChanError(){ <del> this.channels.forEach( channel => channel.trigger(CHANNEL_EVENTS.error) ) <del> } <del> <add> this.channels.forEach( channel => { <add> if(!(channel.isErrored() || channel.isLeaving() || channel.isClosed())){ <add> channel.trigger(CHANNEL_EVENTS.error) <add> } <add> }) <add> } <add> <add> /** <add> * @returns {string} <add> */ <ide> connectionState(){ <ide> switch(this.conn && this.conn.readyState){ <ide> case SOCKET_STATES.connecting: return "connecting" <ide> } <ide> } <ide> <add> /** <add> * @returns {boolean} <add> */ <ide> isConnected(){ return this.connectionState() === "open" } <ide> <add> /** <add> * @param {Channel} <add> */ <ide> remove(channel){ <ide> this.channels = this.channels.filter(c => c.joinRef() !== channel.joinRef()) <ide> } <ide> <add> /** <add> * Initiates a new channel for the given topic <add> * <add> * @param {string} topic <add> * @param {Object} chanParams - Parameters for the channel <add> * @returns {Channel} <add> */ <ide> channel(topic, chanParams = {}){ <ide> let chan = new Channel(topic, chanParams, this) <ide> this.channels.push(chan) <ide> return chan <ide> } <ide> <add> /** <add> * @param {Object} data <add> */ <ide> push(data){ <del> let {topic, event, payload, ref} = data <del> let callback = () => this.conn.send(JSON.stringify(data)) <del> this.log("push", `${topic} ${event} (${ref})`, payload) <add> if (this.hasLogger()) { <add> let {topic, event, payload, ref, join_ref} = data <add> this.log("push", `${topic} ${event} (${join_ref}, ${ref})`, payload) 
<add> } <add> <ide> if(this.isConnected()){ <del> callback() <del> } <del> else { <del> this.sendBuffer.push(callback) <del> } <del> } <del> <del> // Return the next message ref, accounting for overflows <add> this.encode(data, result => this.conn.send(result)) <add> } else { <add> this.sendBuffer.push(() => this.encode(data, result => this.conn.send(result))) <add> } <add> } <add> <add> /** <add> * Return the next message ref, accounting for overflows <add> * @returns {string} <add> */ <ide> makeRef(){ <ide> let newRef = this.ref + 1 <ide> if(newRef === this.ref){ this.ref = 0 } else { this.ref = newRef } <ide> } <ide> <ide> sendHeartbeat(){ if(!this.isConnected()){ return } <del> this.push({topic: "phoenix", event: "heartbeat", payload: {}, ref: this.makeRef()}) <add> if(this.pendingHeartbeatRef){ <add> this.pendingHeartbeatRef = null <add> if (this.hasLogger()) this.log("transport", "heartbeat timeout. Attempting to re-establish connection") <add> this.abnormalClose("heartbeat timeout") <add> return <add> } <add> this.pendingHeartbeatRef = this.makeRef() <add> this.push({topic: "phoenix", event: "heartbeat", payload: {}, ref: this.pendingHeartbeatRef}) <add> } <add> <add> abnormalClose(reason){ <add> this.closeWasClean = false <add> this.conn.close(WS_CLOSE_NORMAL, reason) <ide> } <ide> <ide> flushSendBuffer(){ <ide> } <ide> <ide> onConnMessage(rawMessage){ <del> let msg = JSON.parse(rawMessage.data) <del> let {topic, event, payload, ref} = msg <del> this.log("receive", `${payload.status || ""} ${topic} ${event} ${ref && "(" + ref + ")" || ""}`, payload) <del> this.channels.filter( channel => channel.isMember(topic) ) <del> .forEach( channel => channel.trigger(event, payload, ref) ) <del> this.stateChangeCallbacks.message.forEach( callback => callback(msg) ) <add> this.decode(rawMessage.data, msg => { <add> let {topic, event, payload, ref, join_ref} = msg <add> if(ref && ref === this.pendingHeartbeatRef){ this.pendingHeartbeatRef = null } <add> <add> if 
(this.hasLogger()) this.log("receive", `${payload.status || ""} ${topic} ${event} ${ref && "(" + ref + ")" || ""}`, payload) <add> <add> for (let i = 0; i < this.channels.length; i++) { <add> const channel = this.channels[i] <add> if(!channel.isMember(topic, event, payload, join_ref)){ continue } <add> channel.trigger(event, payload, ref, join_ref) <add> } <add> <add> for (let i = 0; i < this.stateChangeCallbacks.message.length; i++) { <add> this.stateChangeCallbacks.message[i](msg) <add> } <add> }) <ide> } <ide> } <ide> <ide> <ide> switch(status){ <ide> case 200: <del> messages.forEach( msg => this.onmessage({data: JSON.stringify(msg)}) ) <add> messages.forEach(msg => this.onmessage({data: msg})) <ide> this.poll() <ide> break <ide> case 204: <ide> this.onerror() <ide> this.closeAndRetry() <ide> break <del> default: throw(`unhandled poll status ${status}`) <add> default: throw new Error(`unhandled poll status ${status}`) <ide> } <ide> }) <ide> } <ide> send(body){ <ide> Ajax.request("POST", this.endpointURL(), "application/json", body, this.timeout, this.onerror.bind(this, "timeout"), (resp) => { <ide> if(!resp || resp.status !== 200){ <del> this.onerror(status) <add> this.onerror(resp && resp.status) <ide> this.closeAndRetry() <ide> } <ide> }) <ide> } <ide> } <ide> <del> <ide> export class Ajax { <ide> <ide> static request(method, endPoint, accept, body, timeout, ontimeout, callback){ <del> if(window.XDomainRequest){ <add> if(global.XDomainRequest){ <ide> let req = new XDomainRequest() // IE8, IE9 <ide> this.xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback) <ide> } else { <del> let req = window.XMLHttpRequest ? <del> new XMLHttpRequest() : // IE7+, Firefox, Chrome, Opera, Safari <del> new ActiveXObject("Microsoft.XMLHTTP") // IE6, IE5 <add> let req = global.XMLHttpRequest ? 
<add> new global.XMLHttpRequest() : // IE7+, Firefox, Chrome, Opera, Safari <add> new ActiveXObject("Microsoft.XMLHTTP") // IE6, IE5 <ide> this.xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback) <ide> } <ide> } <ide> } <ide> <ide> static xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback){ <add> req.open(method, endPoint, true) <ide> req.timeout = timeout <del> req.open(method, endPoint, true) <ide> req.setRequestHeader("Content-Type", accept) <ide> req.onerror = () => { callback && callback(null) } <ide> req.onreadystatechange = () => { <ide> } <ide> <ide> static parseJSON(resp){ <del> return (resp && resp !== "") ? <del> JSON.parse(resp) : <del> null <add> if(!resp || resp === ""){ return null } <add> <add> try { <add> return JSON.parse(resp) <add> } catch(e) { <add> console && console.log("failed to parse JSON response", resp) <add> return null <add> } <ide> } <ide> <ide> static serialize(obj, parentKey){ <del> let queryStr = []; <add> let queryStr = [] <ide> for(var key in obj){ if(!obj.hasOwnProperty(key)){ continue } <ide> let paramKey = parentKey ? 
`${parentKey}[${key}]` : key <ide> let paramVal = obj[key] <ide> <ide> Ajax.states = {complete: 4} <ide> <del> <del> <del>export var Presence = { <del> <del> syncState(state, newState, onJoin, onLeave){ <add>/** <add> * Initializes the Presence <add> * @param {Channel} channel - The Channel <add> * @param {Object} opts - The options, <add> * for example `{events: {state: "state", diff: "diff"}}` <add> */ <add>export class Presence { <add> <add> constructor(channel, opts = {}){ <add> let events = opts.events || {state: "presence_state", diff: "presence_diff"} <add> this.state = {} <add> this.pendingDiffs = [] <add> this.channel = channel <add> this.joinRef = null <add> this.caller = { <add> onJoin: function(){}, <add> onLeave: function(){}, <add> onSync: function(){} <add> } <add> <add> this.channel.on(events.state, newState => { <add> let {onJoin, onLeave, onSync} = this.caller <add> <add> this.joinRef = this.channel.joinRef() <add> this.state = Presence.syncState(this.state, newState, onJoin, onLeave) <add> <add> this.pendingDiffs.forEach(diff => { <add> this.state = Presence.syncDiff(this.state, diff, onJoin, onLeave) <add> }) <add> this.pendingDiffs = [] <add> onSync() <add> }) <add> <add> this.channel.on(events.diff, diff => { <add> let {onJoin, onLeave, onSync} = this.caller <add> <add> if(this.inPendingSyncState()){ <add> this.pendingDiffs.push(diff) <add> } else { <add> this.state = Presence.syncDiff(this.state, diff, onJoin, onLeave) <add> onSync() <add> } <add> }) <add> } <add> <add> onJoin(callback){ this.caller.onJoin = callback } <add> <add> onLeave(callback){ this.caller.onLeave = callback } <add> <add> onSync(callback){ this.caller.onSync = callback } <add> <add> list(by){ return Presence.list(this.state, by) } <add> <add> inPendingSyncState(){ <add> return !this.joinRef || (this.joinRef !== this.channel.joinRef()) <add> } <add> <add> // lower-level public static API <add> <add> /** <add> * Used to sync the list of presences on the server <add> * with 
the client's state. An optional `onJoin` and `onLeave` callback can <add> * be provided to react to changes in the client's local presences across <add> * disconnects and reconnects with the server. <add> * <add> * @returns {Presence} <add> */ <add> static syncState(currentState, newState, onJoin, onLeave){ <add> let state = this.clone(currentState) <ide> let joins = {} <ide> let leaves = {} <ide> <ide> this.map(state, (key, presence) => { <ide> if(!newState[key]){ <del> leaves[key] = this.clone(presence) <add> leaves[key] = presence <ide> } <ide> }) <ide> this.map(newState, (key, newPresence) => { <ide> joins[key] = newPresence <ide> } <ide> }) <del> this.syncDiff(state, {joins: joins, leaves: leaves}, onJoin, onLeave) <del> }, <del> <del> syncDiff(state, {joins, leaves}, onJoin, onLeave){ <add> return this.syncDiff(state, {joins: joins, leaves: leaves}, onJoin, onLeave) <add> } <add> <add> /** <add> * <add> * Used to sync a diff of presence join and leave <add> * events from the server, as they happen. Like `syncState`, `syncDiff` <add> * accepts optional `onJoin` and `onLeave` callbacks to react to a user <add> * joining or leaving from a device. 
<add> * <add> * @returns {Presence} <add> */ <add> static syncDiff(currentState, {joins, leaves}, onJoin, onLeave){ <add> let state = this.clone(currentState) <ide> if(!onJoin){ onJoin = function(){} } <ide> if(!onLeave){ onLeave = function(){} } <ide> <ide> let currentPresence = state[key] <ide> state[key] = newPresence <ide> if(currentPresence){ <del> state[key].metas.unshift(...currentPresence.metas) <add> let joinedRefs = state[key].metas.map(m => m.phx_ref) <add> let curMetas = currentPresence.metas.filter(m => joinedRefs.indexOf(m.phx_ref) < 0) <add> state[key].metas.unshift(...curMetas) <ide> } <ide> onJoin(key, currentPresence, newPresence) <ide> }) <ide> delete state[key] <ide> } <ide> }) <del> }, <del> <del> list(presences, chooser){ <add> return state <add> } <add> <add> /** <add> * Returns the array of presences, with selected metadata. <add> * <add> * @param {Object} presences <add> * @param {Function} chooser <add> * <add> * @returns {Presence} <add> */ <add> static list(presences, chooser){ <ide> if(!chooser){ chooser = function(key, pres){ return pres } } <ide> <ide> return this.map(presences, (key, presence) => { <ide> return chooser(key, presence) <ide> }) <del> }, <add> } <ide> <ide> // private <ide> <del> map(obj, func){ <add> static map(obj, func){ <ide> return Object.getOwnPropertyNames(obj).map(key => func(key, obj[key])) <del> }, <del> <del> clone(obj){ return JSON.parse(JSON.stringify(obj)) } <add> } <add> <add> static clone(obj){ return JSON.parse(JSON.stringify(obj)) } <ide> } <ide> <ide> <del>// Creates a timer that accepts a `timerCalc` function to perform <del>// calculated timeout retries, such as exponential backoff. 
<del>// <del>// ## Examples <del>// <del>// let reconnectTimer = new Timer(() => this.connect(), function(tries){ <del>// return [1000, 5000, 10000][tries - 1] || 10000 <del>// }) <del>// reconnectTimer.scheduleTimeout() // fires after 1000 <del>// reconnectTimer.scheduleTimeout() // fires after 5000 <del>// reconnectTimer.reset() <del>// reconnectTimer.scheduleTimeout() // fires after 1000 <del>// <add>/** <add> * <add> * Creates a timer that accepts a `timerCalc` function to perform <add> * calculated timeout retries, such as exponential backoff. <add> * <add> * @example <add> * let reconnectTimer = new Timer(() => this.connect(), function(tries){ <add> * return [1000, 5000, 10000][tries - 1] || 10000 <add> * }) <add> * reconnectTimer.scheduleTimeout() // fires after 1000 <add> * reconnectTimer.scheduleTimeout() // fires after 5000 <add> * reconnectTimer.reset() <add> * reconnectTimer.scheduleTimeout() // fires after 1000 <add> * <add> * @param {Function} callback <add> * @param {Function} timerCalc <add> */ <ide> class Timer { <ide> constructor(callback, timerCalc){ <ide> this.callback = callback <ide> clearTimeout(this.timer) <ide> } <ide> <del> // Cancels any previous scheduleTimeout and schedules callback <add> /** <add> * Cancels any previous scheduleTimeout and schedules callback <add> */ <ide> scheduleTimeout(){ <ide> clearTimeout(this.timer) <ide>
JavaScript
mit
ca737ff076e11001f1c61e115ed2685191ff86b6
0
IDotD/Userscript,Idrinth/IDotD
idrinth.tier = { list: { }, addTagged: function ( name ) { var isValidParameter = function ( name ) { return name && idrinth.tier.list.hasOwnProperty ( name ) && typeof idrinth.tier.list[name] !== 'function' && !document.getElementById ( 'idrinth-tier-box-' + name ); }; var isFreeSlot = function ( key ) { return idrinth.tier.taggedSlots.hasOwnProperty ( key ) && typeof key !== 'function' && idrinth.tier.taggedSlots[key] === null; }; if ( !isValidParameter ( name ) ) { return; } var boss = this.list[name]; var make = function ( x, name ) { var makeElement = function ( label, number, description ) { return { content: label + ' ' + idrinth.ui.formatNumber ( number ), attributes: [ { name: 'title', value: description } ] }; }; var info = [ makeElement ( 'FS', boss.fs.nm, 'Fair share' ), makeElement ( 'AP', boss.ap, 'Achievement point damage' ) ]; if ( boss.os && boss.os.nm ) { info.push ( makeElement ( 'OS', boss.os.nm, 'Optimal share' ) ); info.unshift ( makeElement ( 'MA', boss.nm[boss.nm.length - 1], 'Maximum/highest tier' ) ); info.unshift ( makeElement ( 'MI', boss.nm[0], 'Minimum/lowest tier' ) ); } info.unshift ( { type: 'strong', content: boss.name.replace ( /\(.*$/, '' ) } ); idrinth.tier.taggedSlots[x] = idrinth.ui.buildElement ( { id: 'idrinth-tier-box-' + name, css: 'idrinth-hovering-box idrinth-tier-box', children: [ { children: info } ], attributes: [ { name: 'title', value: 'click to close' }, { name: 'onclick', value: 'idrinth.ui.removeElement(this.id);idrinth.tier.taggedSlots[\'' + x + '\']=null;' }, { name: 'style', value: 'left:' + x + 'px;background-image: url(https://dotd.idrinth.de/static/raid-image-service/' + boss.url + '/);' } ] } ); idrinth.ui.body.appendChild ( idrinth.tier.taggedSlots[x] ); }; for (var key in this.taggedSlots) { if ( isFreeSlot ( key ) ) { return make ( key, name ); } } idrinth.core.alert ( idrinth.text.get ( "tier.maxBoxes" ) ); }, taggedSlots: { }, start: function () { 'use strict'; var pos = 1; while ( 0 < 
window.innerWidth - 140 * ( pos + 1 ) ) { this.taggedSlots[( pos * 140 ).toString ()] = null; pos++; } idrinth.core.ajax.runHome ( 'tier-service/', function ( text ) { idrinth.tier.import ( text ); }, function ( ) { window.setTimeout ( idrinth.tier.start, 10000 ); }, function ( ) { window.setTimeout ( idrinth.tier.start, 10000 ); } ); }, import: function ( data ) { 'use strict'; data = JSON.parse ( data ); if ( data ) { idrinth.tier.list = data; var create = function ( field, value ) { 'use strict'; field = field.split ( '#' ); idrinth.settings.data.bannedRaids[field[1]] = value; window.localStorage.setItem ( 'idotd', JSON.stringify ( idrinth.settings.data ) ); }; for (var key in data) { if ( data[key].name ) { if ( idrinth.settings.get ( "bannedRaids#" + data[key].name ) === undefined ) { create ( data[key].name, false ); } document.getElementById ( 'idrinth-raid-may-join-list' ).appendChild ( idrinth.ui.buildElement ( { name: 'bannedRaids#' + data[key].name, rType: '#input', type: 'checkbox', id: 'idrinth-raid-may-join-list-' + data[key].name, label: idrinth.text.get ( "raids.disableJoining" ) + data[key].name } ) ); document.getElementById ( 'idrinth-raid-may-join-list' ).lastChild.setAttribute ( 'style', 'background-image:url(https://dotd.idrinth.de/static/raid-image-service/' + data[key].url + '/);' ); } } } else { window.setTimeout ( idrinth.tier.start, 1000 ); } }, getTierForName: function ( name ) { var clearInnerHtml = function ( elem ) { elem.innerHTML = ''; }; var makeList = function ( list ) { var makeField = function ( listKey, difficulty, ic ) { var ln = { type: 'td' }; try { ln.styles = idrinth.tier.list[listKey].os[difficulty] === idrinth.tier[listKey][difficulty][ic] ? 
'is-os' : ''; } catch ( E ) { idrinth.core.log ( E.toString ( ) ); } try { ln.content = idrinth.ui.formatNumber ( idrinth.tier.list[listKey][difficulty][ic] ) + ' ' + idrinth.tier.list[listKey].epics[difficulty][ic] + 'E'; } catch ( E2 ) { idrinth.core.log ( E2.toString ( ) ); try { ln.content = idrinth.ui.formatNumber ( idrinth.tier.list[listKey][difficulty][ic] ); } catch ( E3 ) { idrinth.core.log ( E3.toString ( ) ); } } return ln; }; var makeRow = function ( title, dataset ) { return { type: 'tr', children: [ { type: 'th', content: title }, { type: 'td', content: idrinth.ui.formatNumber ( dataset.n ) }, { type: 'td', content: idrinth.ui.formatNumber ( dataset.h ) }, { type: 'td', content: idrinth.ui.formatNumber ( dataset.l ) }, { type: 'td', content: idrinth.ui.formatNumber ( dataset.nm ) } ] }; }; var wrapper = document.getElementById ( 'idrinth-tierlist' ); clearInnerHtml ( wrapper ); for (var count = list.length - 1; count >= 0; count--) { var sub = idrinth.ui.buildElement ( { css: 'tier-wrapper', children: [ { type: 'img', attributes: [ { name: 'src', value: 'https://dotd.idrinth.de/static/raid-image-service/' + idrinth.tier.list[list[count]].url + '/' } ] }, { type: 'strong', content: idrinth.tier.list[list[count]].name }, { type: 'button', content: idrinth.text.get ( "tier.tag" ), attributes: [ { name: 'onclick', value: 'idrinth.tier.addTagged(\'' + list[count].replace ( /'/g, '\\\'' ) + '\');' }, { name: 'type', value: 'action' } ] }, { type: 'span', content: 'AP: ' + idrinth.ui.formatNumber ( idrinth.tier.list[list[count]].ap ) }, { type: 'table', children: [ { type: 'thead', children: [ { type: 'tr', children: [ { type: 'th', content: '#' }, { type: 'th', content: idrinth.text.get ( "tier.diff.normal" ) }, { type: 'th', content: idrinth.text.get ( "tier.diff.hard" ) }, { type: 'th', content: idrinth.text.get ( "tier.diff.legend" ) }, { type: 'th', content: idrinth.text.get ( "tier.diff.night" ) } ] } ] }, { type: 'tbody', children: [ makeRow ( 'FS', 
idrinth.tier.list[list[count]].fs ), makeRow ( 'OS', idrinth.tier.list[list[count]].os ), makeRow ( '', { n: '', l: '', h: '', nm: '' } ) ] } ] } ] } ); var maxTiers = Math.max ( idrinth.tier.list[list[count]].n.length, idrinth.tier.list[list[count]].h.length, idrinth.tier.list[list[count]].l.length, idrinth.tier.list[list[count]].nm.length ); for (var ic = 0; ic < maxTiers; ic++) { sub.lastChild.lastChild.appendChild ( idrinth.ui.buildElement ( { type: 'tr', children: [ { type: 'th', content: ic + 1 }, makeField ( list[count], 'n', ic ), makeField ( list[count], 'h', ic ), makeField ( list[count], 'l', ic ), makeField ( list[count], 'nm', ic ) ] } ) ); } wrapper.appendChild ( sub ); } }; if ( !name || name.length === 0 ) { clearInnerHtml ( document.getElementById ( 'idrinth-tierlist' ) ); return; } var result = [ ]; var regExp = new RegExp ( name, 'i' ); for (var key in idrinth.tier.list) { if ( key.match ( regExp ) ) { result.push ( key ); } } makeList ( result ); } };
src/mods/tier.js
idrinth.tier = { list: { }, addTagged: function ( name ) { var isValidParameter = function ( name ) { return name && idrinth.tier.list.hasOwnProperty ( name ) && typeof idrinth.tier.list[name] !== 'function' && !document.getElementById ( 'idrinth-tier-box-' + name ); }; var isFreeSlot = function ( key ) { return idrinth.tier.taggedSlots.hasOwnProperty ( key ) && typeof key !== 'function' && idrinth.tier.taggedSlots[key] === null; }; if ( !isValidParameter ( name ) ) { return; } var boss = this.list[name]; var make = function ( x, name ) { var makeElement = function ( label, number, description ) { return { content: label + ' ' + idrinth.ui.formatNumber ( number ), attributes: [ { name: 'title', value: description } ] }; }; var info = [ makeElement ( 'FS', boss.fs.nm, 'Fair share' ), makeElement ( 'AP', boss.ap, 'Achievement point damage' ) ]; if ( boss.os && boss.os.nm ) { info.push ( makeElement ( 'OS', boss.os.nm, 'Optimal share' ) ); info.unshift ( makeElement ( 'MA', boss.nm[boss.nm.length - 1], 'Maximum/highest tier' ) ); info.unshift ( makeElement ( 'MI', boss.nm[0], 'Minimum/lowest tier' ) ); } info.unshift ( { type: 'strong', content: boss.name.replace ( /\(.*$/, '' ) } ); idrinth.tier.taggedSlots[x] = idrinth.ui.buildElement ( { id: 'idrinth-tier-box-' + name, css: 'idrinth-hovering-box idrinth-tier-box', children: [ { children: info } ], attributes: [ { name: 'title', value: 'click to close' }, { name: 'onclick', value: 'idrinth.ui.removeElement(this.id);idrinth.tier.taggedSlots[\'' + x + '\']=null;' }, { name: 'style', value: 'left:' + x + 'px;background-image: url(https://dotd.idrinth.de/static/raid-image-service/' + boss.url + '/);' } ] } ); idrinth.ui.body.appendChild ( idrinth.tier.taggedSlots[x] ); }; for (var key in this.taggedSlots) { if ( isFreeSlot ( key ) ) { return make ( key, name ); } } idrinth.core.alert ( idrinth.text.get ( "tier.maxBoxes" ) ); }, taggedSlots: { }, start: function () { 'use strict'; var pos = 1; while ( 0 < 
window.innerWidth - 140 * ( pos + 1 ) ) { this.taggedSlots[( pos * 140 ).toString ()] = null; pos++; } idrinth.core.ajax.runHome ( 'tier-service/', function ( text ) { idrinth.tier.import ( text ); }, function ( ) { window.setTimeout ( idrinth.tier.start, 10000 ); }, function ( ) { window.setTimeout ( idrinth.tier.start, 10000 ); } ); }, import: function ( data ) { 'use strict'; data = JSON.parse ( data ); if ( data ) { idrinth.tier.list = data; var create = function ( field, value ) { 'use strict'; field = field.split ( '#' ); if ( !field[1] ) { idrinth.settings.data[field[0]] = value; } else { idrinth.settings.data[field[0]][field[1]] = value; } window.localStorage.setItem ( 'idotd', JSON.stringify ( idrinth.settings.data ) ); }; for (var key in data) { if ( data[key].name ) { if ( idrinth.settings.get ( "bannedRaids#" + data[key].name ) === undefined ) { create ( "bannedRaids#" + data[key].name, false ); } document.getElementById ( 'idrinth-raid-may-join-list' ).appendChild ( idrinth.ui.buildElement ( { name: 'bannedRaids#' + data[key].name, rType: '#input', type: 'checkbox', id: 'idrinth-raid-may-join-list-' + data[key].name, label: idrinth.text.get ( "raids.disableJoining" ) + data[key].name } ) ); document.getElementById ( 'idrinth-raid-may-join-list' ).lastChild.setAttribute ( 'style', 'background-image:url(https://dotd.idrinth.de/static/raid-image-service/' + data[key].url + '/);' ); } } } else { window.setTimeout ( idrinth.tier.start, 1000 ); } }, getTierForName: function ( name ) { var clearInnerHtml = function ( elem ) { elem.innerHTML = ''; }; var makeList = function ( list ) { var makeField = function ( listKey, difficulty, ic ) { var ln = { type: 'td' }; try { ln.styles = idrinth.tier.list[listKey].os[difficulty] === idrinth.tier[listKey][difficulty][ic] ? 
'is-os' : ''; } catch ( E ) { idrinth.core.log ( E.toString ( ) ); } try { ln.content = idrinth.ui.formatNumber ( idrinth.tier.list[listKey][difficulty][ic] ) + ' ' + idrinth.tier.list[listKey].epics[difficulty][ic] + 'E'; } catch ( E2 ) { idrinth.core.log ( E2.toString ( ) ); try { ln.content = idrinth.ui.formatNumber ( idrinth.tier.list[listKey][difficulty][ic] ); } catch ( E3 ) { idrinth.core.log ( E3.toString ( ) ); } } return ln; }; var makeRow = function ( title, dataset ) { return { type: 'tr', children: [ { type: 'th', content: title }, { type: 'td', content: idrinth.ui.formatNumber ( dataset.n ) }, { type: 'td', content: idrinth.ui.formatNumber ( dataset.h ) }, { type: 'td', content: idrinth.ui.formatNumber ( dataset.l ) }, { type: 'td', content: idrinth.ui.formatNumber ( dataset.nm ) } ] }; }; var wrapper = document.getElementById ( 'idrinth-tierlist' ); clearInnerHtml ( wrapper ); for (var count = list.length - 1; count >= 0; count--) { var sub = idrinth.ui.buildElement ( { css: 'tier-wrapper', children: [ { type: 'img', attributes: [ { name: 'src', value: 'https://dotd.idrinth.de/static/raid-image-service/' + idrinth.tier.list[list[count]].url + '/' } ] }, { type: 'strong', content: idrinth.tier.list[list[count]].name }, { type: 'button', content: idrinth.text.get ( "tier.tag" ), attributes: [ { name: 'onclick', value: 'idrinth.tier.addTagged(\'' + list[count].replace ( /'/g, '\\\'' ) + '\');' }, { name: 'type', value: 'action' } ] }, { type: 'span', content: 'AP: ' + idrinth.ui.formatNumber ( idrinth.tier.list[list[count]].ap ) }, { type: 'table', children: [ { type: 'thead', children: [ { type: 'tr', children: [ { type: 'th', content: '#' }, { type: 'th', content: idrinth.text.get ( "tier.diff.normal" ) }, { type: 'th', content: idrinth.text.get ( "tier.diff.hard" ) }, { type: 'th', content: idrinth.text.get ( "tier.diff.legend" ) }, { type: 'th', content: idrinth.text.get ( "tier.diff.night" ) } ] } ] }, { type: 'tbody', children: [ makeRow ( 'FS', 
idrinth.tier.list[list[count]].fs ), makeRow ( 'OS', idrinth.tier.list[list[count]].os ), makeRow ( '', { n: '', l: '', h: '', nm: '' } ) ] } ] } ] } ); var maxTiers = Math.max ( idrinth.tier.list[list[count]].n.length, idrinth.tier.list[list[count]].h.length, idrinth.tier.list[list[count]].l.length, idrinth.tier.list[list[count]].nm.length ); for (var ic = 0; ic < maxTiers; ic++) { sub.lastChild.lastChild.appendChild ( idrinth.ui.buildElement ( { type: 'tr', children: [ { type: 'th', content: ic + 1 }, makeField ( list[count], 'n', ic ), makeField ( list[count], 'h', ic ), makeField ( list[count], 'l', ic ), makeField ( list[count], 'nm', ic ) ] } ) ); } wrapper.appendChild ( sub ); } }; if ( !name || name.length === 0 ) { clearInnerHtml ( document.getElementById ( 'idrinth-tierlist' ) ); return; } var result = [ ]; var regExp = new RegExp ( name, 'i' ); for (var key in idrinth.tier.list) { if ( key.match ( regExp ) ) { result.push ( key ); } } makeList ( result ); } };
simplifiing storage
src/mods/tier.js
simplifiing storage
<ide><path>rc/mods/tier.js <ide> var create = function ( field, value ) { <ide> 'use strict'; <ide> field = field.split ( '#' ); <del> if ( !field[1] ) { <del> idrinth.settings.data[field[0]] = value; <del> } else { <del> idrinth.settings.data[field[0]][field[1]] = value; <del> } <add> idrinth.settings.data.bannedRaids[field[1]] = value; <ide> window.localStorage.setItem ( 'idotd', JSON.stringify ( idrinth.settings.data ) ); <ide> }; <ide> for (var key in data) { <ide> if ( data[key].name ) { <ide> if ( idrinth.settings.get ( "bannedRaids#" + data[key].name ) === undefined ) { <del> create ( "bannedRaids#" + data[key].name, false ); <add> create ( data[key].name, false ); <ide> } <ide> document.getElementById ( 'idrinth-raid-may-join-list' ).appendChild ( idrinth.ui.buildElement ( { <ide> name: 'bannedRaids#' + data[key].name,
JavaScript
mit
a419517363f1779e8b804b3d25b9510d16ffc675
0
sidorares/node-mysql2,sidorares/node-mysql2,sidorares/node-mysql2,sidorares/node-mysql2
var mysql = require('../index.js'); var Connection = mysql.Connection; var inherits = require('util').inherits; module.exports = PoolConnection; inherits(PoolConnection, Connection); PoolConnection.statementKey = Connection.statementKey; function PoolConnection (pool, options) { Connection.call(this, options); this._pool = pool; // When a fatal error occurs the connection's protocol ends, which will cause // the connection to end as well, thus we only need to watch for the end event // and we will be notified of disconnects. var connection = this; this.on('end', function (err) { this._removeFromPool(); }); this.on('error', function (err) { this._removeFromPool(); }); } PoolConnection.prototype.release = function () { if (!this._pool || this._pool._closed) { return; } this._pool.releaseConnection(this); }; // TODO: Remove this when we are removing PoolConnection#end PoolConnection.prototype._realEnd = Connection.prototype.end; PoolConnection.prototype.end = function () { console.warn('Calling conn.end() to release a pooled connection is ' + 'deprecated. In next version calling conn.end() will be ' + 'restored to default conn.end() behavior. Use ' + 'conn.release() instead.' ); this.release(); }; PoolConnection.prototype.destroy = function () { this._removeFromPool(); return Connection.prototype.destroy.apply(this, arguments); }; PoolConnection.prototype._removeFromPool = function () { if (!this._pool || this._pool._closed) { return; } var pool = this._pool; this._pool = null; pool._removeConnection(this); };
lib/pool_connection.js
var mysql = require('../index.js'); var Connection = mysql.Connection; var inherits = require('util').inherits; module.exports = PoolConnection; inherits(PoolConnection, Connection); function PoolConnection (pool, options) { Connection.call(this, options); this._pool = pool; // When a fatal error occurs the connection's protocol ends, which will cause // the connection to end as well, thus we only need to watch for the end event // and we will be notified of disconnects. var connection = this; this.on('end', function (err) { this._removeFromPool(); }); this.on('error', function (err) { this._removeFromPool(); }); } PoolConnection.prototype.release = function () { if (!this._pool || this._pool._closed) { return; } this._pool.releaseConnection(this); }; // TODO: Remove this when we are removing PoolConnection#end PoolConnection.prototype._realEnd = Connection.prototype.end; PoolConnection.prototype.end = function () { console.warn('Calling conn.end() to release a pooled connection is ' + 'deprecated. In next version calling conn.end() will be ' + 'restored to default conn.end() behavior. Use ' + 'conn.release() instead.' ); this.release(); }; PoolConnection.prototype.destroy = function () { this._removeFromPool(); return Connection.prototype.destroy.apply(this, arguments); }; PoolConnection.prototype._removeFromPool = function () { if (!this._pool || this._pool._closed) { return; } var pool = this._pool; this._pool = null; pool._removeConnection(this); };
add ref to statementKey fontion in PoolConnection object
lib/pool_connection.js
add ref to statementKey fontion in PoolConnection object
<ide><path>ib/pool_connection.js <ide> <ide> module.exports = PoolConnection; <ide> inherits(PoolConnection, Connection); <add>PoolConnection.statementKey = Connection.statementKey; <ide> <ide> function PoolConnection (pool, options) { <ide> Connection.call(this, options);
Java
apache-2.0
dec1312f00842eb57c09ea0000a5dd1d187a39df
0
atomix/atomix,atomix/atomix,kuujo/copycat,kuujo/copycat
/* * Copyright 2018-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.atomix.primitive.partition; import io.atomix.cluster.Node; import io.atomix.primitive.partition.impl.NodeMemberGroup; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; /** * Member group strategy. * <p> * Member group strategies are default implementations of {@link MemberGroupProvider} for built-in node attributes. */ public enum MemberGroupStrategy implements MemberGroupProvider { /** * Zone aware member group strategy. * <p> * This strategy will create a member group for each unique zone in the cluster. */ ZONE_AWARE { @Override public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { return groupNodes(nodes, node -> node.zone() != null ? node.zone() : node.id().id()); } }, /** * Rack aware member group strategy. * <p> * This strategy will create a member group for each unique rack in the cluster. */ RACK_AWARE { @Override public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { return groupNodes(nodes, node -> node.rack() != null ? node.rack() : node.id().id()); } }, /** * Host aware member group strategy. * <p> * This strategy will create a member group for each unique host in the cluster. 
*/ HOST_AWARE { @Override public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { return groupNodes(nodes, node -> node.host() != null ? node.host() : node.id().id()); } }, /** * Node aware member group strategy (the default). * <p> * This strategy will create a member group for each node in the cluster, effectively behaving the same as if * no member groups were defined. */ NODE_AWARE { @Override public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { return groupNodes(nodes, node -> node.id().id()); } }; /** * Groups nodes by the given key function. * * @param nodes the nodes to group * @param keyFunction the key function to apply to nodes to extract a key * @return a collection of node member groups */ protected Collection<MemberGroup> groupNodes(Collection<Node> nodes, Function<Node, String> keyFunction) { Map<String, Set<Node>> groups = new HashMap<>(); for (Node node : nodes) { groups.computeIfAbsent(keyFunction.apply(node), k -> new HashSet<>()).add(node); } return groups.entrySet().stream() .map(entry -> new NodeMemberGroup(MemberGroupId.from(entry.getKey()), entry.getValue())) .collect(Collectors.toList()); } }
primitive/src/main/java/io/atomix/primitive/partition/MemberGroupStrategy.java
/* * Copyright 2018-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.atomix.primitive.partition; import io.atomix.cluster.Node; import io.atomix.primitive.partition.impl.NodeMemberGroup; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; /** * Member group strategy. * <p> * Member group strategies are default implementations of {@link MemberGroupProvider} for built-in node attributes. */ public enum MemberGroupStrategy implements MemberGroupProvider { /** * Zone aware member group strategy. * <p> * This strategy will create a member group for each unique zone in the cluster. */ ZONE_AWARE { @Override public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { return groupNodes(nodes, Node::zone); } }, /** * Rack aware member group strategy. * <p> * This strategy will create a member group for each unique rack in the cluster. */ RACK_AWARE { @Override public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { return groupNodes(nodes, Node::rack); } }, /** * Host aware member group strategy. * <p> * This strategy will create a member group for each unique host in the cluster. 
*/ HOST_AWARE { @Override public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { return groupNodes(nodes, Node::host); } }, /** * Node aware member group strategy (the default). * <p> * This strategy will create a member group for each node in the cluster, effectively behaving the same as if * no member groups were defined. */ NODE_AWARE { @Override public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { return groupNodes(nodes, node -> node.id().id()); } }; /** * Groups nodes by the given key function. * * @param nodes the nodes to group * @param keyFunction the key function to apply to nodes to extract a key * @return a collection of node member groups */ protected Collection<MemberGroup> groupNodes(Collection<Node> nodes, Function<Node, String> keyFunction) { Map<String, Set<Node>> groups = new HashMap<>(); for (Node node : nodes) { groups.computeIfAbsent(keyFunction.apply(node), k -> new HashSet<>()).add(node); } return groups.entrySet().stream() .map(entry -> new NodeMemberGroup(MemberGroupId.from(entry.getKey()), entry.getValue())) .collect(Collectors.toList()); } }
Handle null zone/rack/host in MemberGroupStrategy.
primitive/src/main/java/io/atomix/primitive/partition/MemberGroupStrategy.java
Handle null zone/rack/host in MemberGroupStrategy.
<ide><path>rimitive/src/main/java/io/atomix/primitive/partition/MemberGroupStrategy.java <ide> ZONE_AWARE { <ide> @Override <ide> public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { <del> return groupNodes(nodes, Node::zone); <add> return groupNodes(nodes, node -> node.zone() != null ? node.zone() : node.id().id()); <ide> } <ide> }, <ide> <ide> RACK_AWARE { <ide> @Override <ide> public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { <del> return groupNodes(nodes, Node::rack); <add> return groupNodes(nodes, node -> node.rack() != null ? node.rack() : node.id().id()); <ide> } <ide> }, <ide> <ide> HOST_AWARE { <ide> @Override <ide> public Collection<MemberGroup> getMemberGroups(Collection<Node> nodes) { <del> return groupNodes(nodes, Node::host); <add> return groupNodes(nodes, node -> node.host() != null ? node.host() : node.id().id()); <ide> } <ide> }, <ide>
Java
apache-2.0
b45b467717f1a5065160ac360d9f6499f2d88baf
0
mangstadt/vinnie
/* * MIT License * * Copyright (c) 2016 Michael Angstadt * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.github.mangstadt.vinnie.validate; import java.util.EnumMap; import java.util.HashMap; import java.util.Map; import com.github.mangstadt.vinnie.SyntaxStyle; /** * <p> * Checks properties for illegal characters. * </p> * <p> * Two kinds of checking are supported: strict and non-strict. Strict ensures * that the data adhere to the specifications. Non-strict allows all characters * to be used, as long as they do not break the syntax. 
* </p> * @author Michael Angstadt */ public class VObjectValidator { private static final Map<SyntaxStyle, Map<Boolean, AllowedCharacters>> propertyName = new EnumMap<SyntaxStyle, Map<Boolean, AllowedCharacters>>(SyntaxStyle.class); static { boolean strict; SyntaxStyle syntax; syntax = SyntaxStyle.OLD; { Map<Boolean, AllowedCharacters> map = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //@formatter:off map.put(strict, new AllowedCharacters.Builder() .allowAll() .except("\r\n:.;") .build()); //@formatter:on } strict = true; { //@formatter:off map.put(strict, new AllowedCharacters.Builder() .allowPrintable() .except("[]=:.,") /* * Note: The specification's formal grammar allows semicolons to * be present in property name. This may be a mistake because * this would break the syntax. This validator will treat * semicolons as invalid in this context. * * The specifications state that semicolons can be included in * parameter values by escaping them with a backslash--however, * the specification is not clear as to whether this is also * permitted in property names. 
* * vCard 2.1: Section 2.1.2 * vCal 1.0: Section 2, "Property" sub-heading */ .except(';') .build()); //@formatter:on } propertyName.put(syntax, map); } syntax = SyntaxStyle.NEW; { Map<Boolean, AllowedCharacters> map = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //same as old style syntax map.put(strict, propertyName.get(SyntaxStyle.OLD).get(strict)); } strict = true; { //@formatter:off map.put(strict, new AllowedCharacters.Builder() .allow('A', 'Z') .allow('a', 'z') .allow('0', '9') .allow('-') .build()); //@formatter:on } propertyName.put(syntax, map); } } private static final Map<SyntaxStyle, Map<Boolean, AllowedCharacters>> group = propertyName; private static final Map<SyntaxStyle, Map<Boolean, AllowedCharacters>> parameterName = new EnumMap<SyntaxStyle, Map<Boolean, AllowedCharacters>>(SyntaxStyle.class); static { boolean strict; SyntaxStyle syntax; syntax = SyntaxStyle.OLD; { Map<Boolean, AllowedCharacters> map = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //@formatter:off map.put(strict, new AllowedCharacters.Builder() .allowAll() .except("\r\n:;=") .build()); //@formatter:on } strict = true; { //same as property name map.put(strict, propertyName.get(syntax).get(strict)); } parameterName.put(syntax, map); } syntax = SyntaxStyle.NEW; { Map<Boolean, AllowedCharacters> map = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //same as old style syntax map.put(strict, parameterName.get(SyntaxStyle.OLD).get(strict)); } strict = true; { //same as property name map.put(strict, propertyName.get(syntax).get(strict)); } parameterName.put(syntax, map); } } private static final Map<SyntaxStyle, Map<Boolean, Map<Boolean, AllowedCharacters>>> parameterValue = new EnumMap<SyntaxStyle, Map<Boolean, Map<Boolean, AllowedCharacters>>>(SyntaxStyle.class); static { boolean strict, caretEncoding; SyntaxStyle syntax; syntax = SyntaxStyle.OLD; { Map<Boolean, Map<Boolean, AllowedCharacters>> map = new HashMap<Boolean, Map<Boolean, 
AllowedCharacters>>(); caretEncoding = false; { Map<Boolean, AllowedCharacters> map2 = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //@formatter:off map2.put(strict, new AllowedCharacters.Builder() .allowAll() .except("\r\n:") .build()); //@formatter:on } strict = true; { //same as parameter name, except semicolons are allowed //@formatter:off AllowedCharacters paramName = parameterName.get(syntax).get(strict); map2.put(strict, new AllowedCharacters.Builder(paramName) .allow(';') .build()); //@formatter::on } map.put(caretEncoding, map2); } caretEncoding = true; { /* * Same as when caret encoding is disabled because * old style syntax does not support caret encoding. */ map.put(caretEncoding, map.get(false)); } parameterValue.put(syntax, map); } syntax = SyntaxStyle.NEW; { Map<Boolean, Map<Boolean, AllowedCharacters>> map = new HashMap<Boolean, Map<Boolean, AllowedCharacters>>(); caretEncoding = false; { Map<Boolean, AllowedCharacters> map2 = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //@formatter:off map2.put(strict, new AllowedCharacters.Builder() .allowAll() .except("\r\n\"") .build()); //@formatter:on } strict = true; { //@formatter:off map2.put(strict, new AllowedCharacters.Builder() .allowPrintable() .allow('\t') .except('"') .build()); //@formatter:on } map.put(caretEncoding, map2); } caretEncoding = true; { Map<Boolean, AllowedCharacters> map2 = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //@formatter:off map2.put(strict, new AllowedCharacters.Builder() .allowAll() .build()); //@formatter:on } strict = true; { //@formatter:off map2.put(strict, new AllowedCharacters.Builder() .allowPrintable() .allow("\r\n\t") .build()); //@formatter:on } map.put(caretEncoding, map2); } parameterValue.put(syntax, map); } } /** * Validates a property name. 
* @param name the property name * @param syntax the syntax style to validate against * @param strict false to allow all characters as long as they don't break * the syntax, true for spec-compliant validation * @return true if the property name is valid, false if not */ public static boolean validatePropertyName(String name, SyntaxStyle syntax, boolean strict) { return allowedCharactersPropertyName(syntax, strict).check(name); } /** * Gets the list of allowed characters for property names. * @param syntax the syntax style * @param strict false for the non-strict list, true for the spec-compliant * list * @return the character list */ public static AllowedCharacters allowedCharactersPropertyName(SyntaxStyle syntax, boolean strict) { return propertyName.get(syntax).get(strict); } /** * Validates a group name. * @param group the group name * @param syntax the syntax style to validate against * @param strict false to allow all characters as long as they don't break * the syntax, true for spec-compliant validation * @return true if the group name is valid, false if not */ public static boolean validateGroupName(String group, SyntaxStyle syntax, boolean strict) { return allowedCharactersGroup(syntax, strict).check(group); } /** * Gets the list of allowed characters for group names. * @param syntax the syntax style * @param strict false for the non-strict list, true for the spec-compliant * list * @return the character list */ public static AllowedCharacters allowedCharactersGroup(SyntaxStyle syntax, boolean strict) { return group.get(syntax).get(strict); } /** * Validates a parameter name. 
* @param name the parameter name * @param syntax the syntax style to validate against * @param strict false to allow all characters as long as they don't break * the syntax, true for spec-compliant validation * @return true if the parameter name is valid, false if not */ public static boolean validateParameterName(String name, SyntaxStyle syntax, boolean strict) { return allowedCharactersParameterName(syntax, strict).check(name); } /** * Gets the list of allowed characters for parameter names. * @param syntax the syntax style * @param strict false for the non-strict list, true for the spec-compliant * list * @return the character list */ public static AllowedCharacters allowedCharactersParameterName(SyntaxStyle syntax, boolean strict) { return parameterName.get(syntax).get(strict); } /** * Validates a parameter value. * @param value the parameter value * @param syntax the syntax style to validate against * @param caretEncoding true if caret encoding is enabled, false if not * @param strict false to allow all characters as long as they don't break * the syntax, true for spec-compliant validation * @return true if the parameter value is valid, false if not */ public static boolean validateParameterValue(String value, SyntaxStyle syntax, boolean caretEncoding, boolean strict) { return allowedCharactersParameterValue(syntax, caretEncoding, strict).check(value); } /** * Gets the list of allowed characters for parameter values. * @param syntax the syntax style * @param caretEncoding true if caret encoding is enabled, false if not * @param strict false for the non-strict list, true for the spec-compliant * list * @return the character list */ public static AllowedCharacters allowedCharactersParameterValue(SyntaxStyle syntax, boolean caretEncoding, boolean strict) { return parameterValue.get(syntax).get(caretEncoding).get(strict); } }
src/main/java/com/github/mangstadt/vinnie/validate/VObjectValidator.java
/* * MIT License * * Copyright (c) 2016 Michael Angstadt * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.github.mangstadt.vinnie.validate; import java.util.EnumMap; import java.util.HashMap; import java.util.Map; import com.github.mangstadt.vinnie.SyntaxStyle; /** * <p> * Checks properties for illegal characters. * </p> * <p> * Two kinds of checking are supported: strict and non-strict. Strict ensures * that the data adhere to the specifications. Non-strict allows all characters * to be used, as long as they do not break the syntax. 
* </p> * @author Michael Angstadt */ public class VObjectValidator { private static final Map<SyntaxStyle, Map<Boolean, AllowedCharacters>> propertyName = new EnumMap<SyntaxStyle, Map<Boolean, AllowedCharacters>>(SyntaxStyle.class); static { boolean strict; SyntaxStyle syntax; syntax = SyntaxStyle.OLD; { Map<Boolean, AllowedCharacters> map = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //@formatter:off map.put(strict, new AllowedCharacters.Builder() .allowAll() .except("\r\n:.;") .build()); //@formatter:on } strict = true; { //@formatter:off map.put(strict, new AllowedCharacters.Builder() .allowPrintable() .allow('\t') .except("[]=:.,") /* * Note: The specification's formal grammar allows semicolons to * be present in property name. This may be a mistake because * this would break the syntax. This validator will treat * semicolons as invalid in this context. * * The specifications state that semicolons can be included in * parameter values by escaping them with a backslash--however, * the specification is not clear as to whether this is also * permitted in property names. 
* * vCard 2.1: Section 2.1.2 * vCal 1.0: Section 2, "Property" sub-heading */ .except(';') .build()); //@formatter:on } propertyName.put(syntax, map); } syntax = SyntaxStyle.NEW; { Map<Boolean, AllowedCharacters> map = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //same as old style syntax map.put(strict, propertyName.get(SyntaxStyle.OLD).get(strict)); } strict = true; { //@formatter:off map.put(strict, new AllowedCharacters.Builder() .allow('A', 'Z') .allow('a', 'z') .allow('0', '9') .allow('-') .build()); //@formatter:on } propertyName.put(syntax, map); } } private static final Map<SyntaxStyle, Map<Boolean, AllowedCharacters>> group = propertyName; private static final Map<SyntaxStyle, Map<Boolean, AllowedCharacters>> parameterName = new EnumMap<SyntaxStyle, Map<Boolean, AllowedCharacters>>(SyntaxStyle.class); static { boolean strict; SyntaxStyle syntax; syntax = SyntaxStyle.OLD; { Map<Boolean, AllowedCharacters> map = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //@formatter:off map.put(strict, new AllowedCharacters.Builder() .allowAll() .except("\r\n:;=") .build()); //@formatter:on } strict = true; { //same as property name map.put(strict, propertyName.get(syntax).get(strict)); } parameterName.put(syntax, map); } syntax = SyntaxStyle.NEW; { Map<Boolean, AllowedCharacters> map = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //same as old style syntax map.put(strict, parameterName.get(SyntaxStyle.OLD).get(strict)); } strict = true; { //same as property name map.put(strict, propertyName.get(syntax).get(strict)); } parameterName.put(syntax, map); } } private static final Map<SyntaxStyle, Map<Boolean, Map<Boolean, AllowedCharacters>>> parameterValue = new EnumMap<SyntaxStyle, Map<Boolean, Map<Boolean, AllowedCharacters>>>(SyntaxStyle.class); static { boolean strict, caretEncoding; SyntaxStyle syntax; syntax = SyntaxStyle.OLD; { Map<Boolean, Map<Boolean, AllowedCharacters>> map = new HashMap<Boolean, Map<Boolean, 
AllowedCharacters>>(); caretEncoding = false; { Map<Boolean, AllowedCharacters> map2 = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //@formatter:off map2.put(strict, new AllowedCharacters.Builder() .allowAll() .except("\r\n:") .build()); //@formatter:on } strict = true; { //same as parameter name, except semicolons are allowed //@formatter:off AllowedCharacters paramName = parameterName.get(syntax).get(strict); map2.put(strict, new AllowedCharacters.Builder(paramName) .allow(';') .build()); //@formatter::on } map.put(caretEncoding, map2); } caretEncoding = true; { /* * Same as when caret encoding is disabled because * old style syntax does not support caret encoding. */ map.put(caretEncoding, map.get(false)); } parameterValue.put(syntax, map); } syntax = SyntaxStyle.NEW; { Map<Boolean, Map<Boolean, AllowedCharacters>> map = new HashMap<Boolean, Map<Boolean, AllowedCharacters>>(); caretEncoding = false; { Map<Boolean, AllowedCharacters> map2 = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //@formatter:off map2.put(strict, new AllowedCharacters.Builder() .allowAll() .except("\r\n\"") .build()); //@formatter:on } strict = true; { //@formatter:off map2.put(strict, new AllowedCharacters.Builder() .allowPrintable() .allow('\t') .except('"') .build()); //@formatter:on } map.put(caretEncoding, map2); } caretEncoding = true; { Map<Boolean, AllowedCharacters> map2 = new HashMap<Boolean, AllowedCharacters>(); strict = false; { //@formatter:off map2.put(strict, new AllowedCharacters.Builder() .allowAll() .build()); //@formatter:on } strict = true; { //@formatter:off map2.put(strict, new AllowedCharacters.Builder() .allowPrintable() .allow("\r\n\t") .build()); //@formatter:on } map.put(caretEncoding, map2); } parameterValue.put(syntax, map); } } /** * Validates a property name. 
* @param name the property name * @param syntax the syntax style to validate against * @param strict false to allow all characters as long as they don't break * the syntax, true for spec-compliant validation * @return true if the property name is valid, false if not */ public static boolean validatePropertyName(String name, SyntaxStyle syntax, boolean strict) { return allowedCharactersPropertyName(syntax, strict).check(name); } /** * Gets the list of allowed characters for property names. * @param syntax the syntax style * @param strict false for the non-strict list, true for the spec-compliant * list * @return the character list */ public static AllowedCharacters allowedCharactersPropertyName(SyntaxStyle syntax, boolean strict) { return propertyName.get(syntax).get(strict); } /** * Validates a group name. * @param group the group name * @param syntax the syntax style to validate against * @param strict false to allow all characters as long as they don't break * the syntax, true for spec-compliant validation * @return true if the group name is valid, false if not */ public static boolean validateGroupName(String group, SyntaxStyle syntax, boolean strict) { return allowedCharactersGroup(syntax, strict).check(group); } /** * Gets the list of allowed characters for group names. * @param syntax the syntax style * @param strict false for the non-strict list, true for the spec-compliant * list * @return the character list */ public static AllowedCharacters allowedCharactersGroup(SyntaxStyle syntax, boolean strict) { return group.get(syntax).get(strict); } /** * Validates a parameter name. 
* @param name the parameter name * @param syntax the syntax style to validate against * @param strict false to allow all characters as long as they don't break * the syntax, true for spec-compliant validation * @return true if the parameter name is valid, false if not */ public static boolean validateParameterName(String name, SyntaxStyle syntax, boolean strict) { return allowedCharactersParameterName(syntax, strict).check(name); } /** * Gets the list of allowed characters for parameter names. * @param syntax the syntax style * @param strict false for the non-strict list, true for the spec-compliant * list * @return the character list */ public static AllowedCharacters allowedCharactersParameterName(SyntaxStyle syntax, boolean strict) { return parameterName.get(syntax).get(strict); } /** * Validates a parameter value. * @param value the parameter value * @param syntax the syntax style to validate against * @param caretEncoding true if caret encoding is enabled, false if not * @param strict false to allow all characters as long as they don't break * the syntax, true for spec-compliant validation * @return true if the parameter value is valid, false if not */ public static boolean validateParameterValue(String value, SyntaxStyle syntax, boolean caretEncoding, boolean strict) { return allowedCharactersParameterValue(syntax, caretEncoding, strict).check(value); } /** * Gets the list of allowed characters for parameter values. * @param syntax the syntax style * @param caretEncoding true if caret encoding is enabled, false if not * @param strict false for the non-strict list, true for the spec-compliant * list * @return the character list */ public static AllowedCharacters allowedCharactersParameterValue(SyntaxStyle syntax, boolean caretEncoding, boolean strict) { return parameterValue.get(syntax).get(caretEncoding).get(strict); } }
Tabs are not allowed in 2.1 property names.
src/main/java/com/github/mangstadt/vinnie/validate/VObjectValidator.java
Tabs are not allowed in 2.1 property names.
<ide><path>rc/main/java/com/github/mangstadt/vinnie/validate/VObjectValidator.java <ide> //@formatter:off <ide> map.put(strict, new AllowedCharacters.Builder() <ide> .allowPrintable() <del> .allow('\t') <ide> .except("[]=:.,") <ide> <ide> /*
Java
apache-2.0
dbcdd878ca4b9d2f963da1827daea9119c7d1c81
0
alxdarksage/BridgePF,alxdarksage/BridgePF,Sage-Bionetworks/BridgePF,Sage-Bionetworks/BridgePF,DwayneJengSage/BridgePF,alxdarksage/BridgePF,Sage-Bionetworks/BridgePF,DwayneJengSage/BridgePF,DwayneJengSage/BridgePF
package org.sagebionetworks.bridge.play.controllers; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyVararg; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableList; import org.junit.Test; import org.mockito.ArgumentCaptor; import play.mvc.Result; import play.test.Helpers; import org.sagebionetworks.bridge.Roles; import org.sagebionetworks.bridge.TestConstants; import org.sagebionetworks.bridge.TestUtils; import org.sagebionetworks.bridge.dao.UploadSchemaDao; import org.sagebionetworks.bridge.exceptions.InvalidEntityException; import org.sagebionetworks.bridge.json.BridgeObjectMapper; import org.sagebionetworks.bridge.models.accounts.UserSession; import org.sagebionetworks.bridge.models.studies.StudyIdentifier; import org.sagebionetworks.bridge.models.studies.StudyIdentifierImpl; import org.sagebionetworks.bridge.models.upload.UploadSchema; import org.sagebionetworks.bridge.services.UploadSchemaService; public class UploadSchemaControllerTest { private static final String TEST_SCHEMA_ID = "controller-test-schema"; private static final String TEST_SCHEMA_JSON = "{\n" + " \"name\":\"Controller Test Schema\",\n" + " \"revision\":3,\n" + " \"schemaId\":\"controller-test-schema\",\n" + " \"schemaType\":\"ios_data\",\n" + " \"fieldDefinitions\":[\n" + " {\n" + " \"name\":\"field-name\",\n" + " \"required\":true,\n" + " \"type\":\"STRING\"\n" + " }\n" + " ]\n" + "}"; @Test public void createV4() throws Exception { // mock service UploadSchemaService mockSvc = mock(UploadSchemaService.class); 
ArgumentCaptor<UploadSchema> createdSchemaCaptor = ArgumentCaptor.forClass(UploadSchema.class); when(mockSvc.createSchemaRevisionV4(eq(TestConstants.TEST_STUDY), createdSchemaCaptor.capture())).thenReturn( makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.createSchemaRevisionV4(); assertEquals(201, result.status()); assertSchemaInResult(result); assertSchemaInArgCaptor(createdSchemaCaptor); } @Test public void createSchema() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); ArgumentCaptor<UploadSchema> createdSchemaArgCaptor = ArgumentCaptor.forClass(UploadSchema.class); when(mockSvc.createOrUpdateUploadSchema(eq(TestConstants.TEST_STUDY), createdSchemaArgCaptor.capture())) .thenReturn(makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.createOrUpdateUploadSchema(); assertEquals(200, result.status()); assertSchemaInResult(result); assertSchemaInArgCaptor(createdSchemaArgCaptor); } @Test public void deleteSchemaById() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.deleteAllRevisionsOfUploadSchema(TestConstants.TEST_STUDY_IDENTIFIER, "delete-schema"); assertEquals(200, result.status()); verify(mockSvc).deleteUploadSchemaById(TestConstants.TEST_STUDY, "delete-schema"); } @Test public void getSchemaById() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); when(mockSvc.getUploadSchema(TestConstants.TEST_STUDY, TEST_SCHEMA_ID)).thenReturn( makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = 
setupControllerWithService(mockSvc); Result result = controller.getUploadSchema(TEST_SCHEMA_ID); assertEquals(200, result.status()); assertSchemaInResult(result); } @Test public void getSchemaByIdAndRev() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); when(mockSvc.getUploadSchemaByIdAndRev(TestConstants.TEST_STUDY, TEST_SCHEMA_ID, 1)).thenReturn( makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.getUploadSchemaByIdAndRev(TEST_SCHEMA_ID, 1); assertEquals(200, result.status()); assertSchemaInResult(result); } @Test public void getByStudyAndSchemaAndRev() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); when(mockSvc.getUploadSchemaByIdAndRev(TestConstants.TEST_STUDY, TEST_SCHEMA_ID, 1)).thenReturn( makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.getUploadSchemaByStudyAndSchemaAndRev(TestConstants.TEST_STUDY_IDENTIFIER, TEST_SCHEMA_ID, 1); assertEquals(200, result.status()); // Unlike the other methods, this also returns study ID String resultJson = Helpers.contentAsString(result); UploadSchema resultSchema = BridgeObjectMapper.get().readValue(resultJson, UploadSchema.class); assertEquals(TEST_SCHEMA_ID, resultSchema.getSchemaId()); assertEquals(TestConstants.TEST_STUDY_IDENTIFIER, resultSchema.getStudyId()); } @Test public void getSchemasForStudy() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); when(mockSvc.getUploadSchemasForStudy(TestConstants.TEST_STUDY)).thenReturn(ImmutableList.of( makeUploadSchemaForOutput())); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = 
controller.getUploadSchemasForStudy(); assertEquals(200, result.status()); String resultJson = Helpers.contentAsString(result); JsonNode resultNode = BridgeObjectMapper.get().readTree(resultJson); assertEquals("ResourceList", resultNode.get("type").textValue()); assertEquals(1, resultNode.get("total").intValue()); JsonNode itemListNode = resultNode.get("items"); assertEquals(1, itemListNode.size()); UploadSchema resultSchema = BridgeObjectMapper.get().treeToValue(itemListNode.get(0), UploadSchema.class); assertEquals(TEST_SCHEMA_ID, resultSchema.getSchemaId()); assertNull(resultSchema.getStudyId()); } @Test public void getAllRevisionsOfASchema() throws Exception { String schemaId = "controller-test-schema"; // Create a couple of revisions UploadSchema schema1 = makeUploadSchemaForOutput(1); UploadSchema schema2 = makeUploadSchemaForOutput(2); UploadSchema schema3 = makeUploadSchemaForOutput(3); // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); when(mockSvc.getUploadSchemaAllRevisions(TestConstants.TEST_STUDY, schemaId)).thenReturn(ImmutableList.of( schema3, schema2, schema1)); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.getUploadSchemaAllRevisions(schemaId); assertEquals(200, result.status()); String resultJson = Helpers.contentAsString(result); JsonNode resultNode = BridgeObjectMapper.get().readTree(resultJson); assertEquals("ResourceList", resultNode.get("type").textValue()); assertEquals(3, resultNode.get("total").intValue()); JsonNode itemsNode = resultNode.get("items"); assertEquals(3, itemsNode.size()); // Schemas are returned in reverse order. 
UploadSchema returnedSchema3 = BridgeObjectMapper.get().treeToValue(itemsNode.get(0), UploadSchema.class); assertEquals(3, returnedSchema3.getRevision()); assertEquals(TEST_SCHEMA_ID, returnedSchema3.getSchemaId()); assertNull(returnedSchema3.getStudyId()); UploadSchema returnedSchema2 = BridgeObjectMapper.get().treeToValue(itemsNode.get(1), UploadSchema.class); assertEquals(2, returnedSchema2.getRevision()); assertEquals(TEST_SCHEMA_ID, returnedSchema2.getSchemaId()); assertNull(returnedSchema2.getStudyId()); UploadSchema returnedSchema1 = BridgeObjectMapper.get().treeToValue(itemsNode.get(2), UploadSchema.class); assertEquals(1, returnedSchema1.getRevision()); assertEquals(TEST_SCHEMA_ID, returnedSchema1.getSchemaId()); assertNull(returnedSchema1.getStudyId()); } @Test public void updateV4() throws Exception { // mock service UploadSchemaService mockSvc = mock(UploadSchemaService.class); ArgumentCaptor<UploadSchema> updatedSchemaCaptor = ArgumentCaptor.forClass(UploadSchema.class); when(mockSvc.updateSchemaRevisionV4(eq(TestConstants.TEST_STUDY), eq(TEST_SCHEMA_ID), eq(1), updatedSchemaCaptor.capture())).thenReturn(makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.updateSchemaRevisionV4(TEST_SCHEMA_ID, 1); assertEquals(200, result.status()); assertSchemaInResult(result); assertSchemaInArgCaptor(updatedSchemaCaptor); } @Test public void invalidSchemaThrowsCompleteValidationException() throws Exception { // mock session StudyIdentifier studyIdentifier = new StudyIdentifierImpl("create-schema-study"); UserSession mockSession = new UserSession(); mockSession.setStudyIdentifier(studyIdentifier); // mock request JSON; this is pretty bad JSON. We want an error message back // that should practically tell the caller how to construct this object. 
String json = "{\"schemaId\":\"schemaId\",\"fieldDefinitions\":[{\"name\":\"foo\"}]}"; TestUtils.mockPlayContextWithJson(json); UploadSchema schema = BridgeObjectMapper.get().readValue(json, UploadSchema.class); // spy controller UploadSchemaController controller = spy(new UploadSchemaController()); // We need the real service because it throws the InvalidEntityException we're testing here. UploadSchemaDao uploadSchemaDao = mock(UploadSchemaDao.class); doReturn(schema).when(uploadSchemaDao).getUploadSchemaLatestRevisionById(studyIdentifier, "schemaId"); UploadSchemaService uploadSchemaService = new UploadSchemaService(); uploadSchemaService.setUploadSchemaDao(uploadSchemaDao); controller.setUploadSchemaService(uploadSchemaService); doReturn(mockSession).when(controller).getAuthenticatedSession(any(Roles.class)); // execute and validate try { controller.createOrUpdateUploadSchema(); fail("Should have thrown exception"); } catch(InvalidEntityException e) { assertEquals("name is required", e.getErrors().get("name").get(0)); assertEquals("schemaType is required", e.getErrors().get("schemaType").get(0)); assertEquals("fieldDefinitions[0].type is required", e.getErrors().get("fieldDefinitions[0].type").get(0)); } } private static UploadSchemaController setupControllerWithService(UploadSchemaService svc) throws Exception { // mock session UserSession mockSession = new UserSession(); mockSession.setStudyIdentifier(TestConstants.TEST_STUDY); // mock request JSON TestUtils.mockPlayContextWithJson(TEST_SCHEMA_JSON); // spy controller UploadSchemaController controller = spy(new UploadSchemaController()); controller.setUploadSchemaService(svc); doReturn(mockSession).when(controller).getAuthenticatedSession(anyVararg()); return controller; } private static UploadSchema makeUploadSchemaForOutput() throws Exception { return makeUploadSchemaForOutput(3); } private static UploadSchema makeUploadSchemaForOutput(int revision) throws Exception { ObjectNode node = 
(ObjectNode)BridgeObjectMapper.get().readTree(TEST_SCHEMA_JSON); node.put("revision", revision); // Server returns schemas with study IDs (which are filtered out selectively in some methods). node.put("studyId", TestConstants.TEST_STUDY_IDENTIFIER); return BridgeObjectMapper.get().convertValue(node, UploadSchema.class); } private static void assertSchemaInResult(Result result) throws Exception { // JSON validation is already tested, so just check obvious things like schema ID // Also, (most) method results don't include study ID String jsonText = Helpers.contentAsString(result); UploadSchema schema = BridgeObjectMapper.get().readValue(jsonText, UploadSchema.class); assertEquals(TEST_SCHEMA_ID, schema.getSchemaId()); assertNull(schema.getStudyId()); } private static void assertSchemaInArgCaptor(ArgumentCaptor<UploadSchema> argCaptor) { // Similarly, just check schema ID UploadSchema arg = argCaptor.getValue(); assertEquals(TEST_SCHEMA_ID, arg.getSchemaId()); } }
test/org/sagebionetworks/bridge/play/controllers/UploadSchemaControllerTest.java
package org.sagebionetworks.bridge.play.controllers; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyVararg; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableList; import org.junit.Test; import org.mockito.ArgumentCaptor; import play.mvc.Result; import play.test.Helpers; import org.sagebionetworks.bridge.Roles; import org.sagebionetworks.bridge.TestConstants; import org.sagebionetworks.bridge.TestUtils; import org.sagebionetworks.bridge.exceptions.InvalidEntityException; import org.sagebionetworks.bridge.json.BridgeObjectMapper; import org.sagebionetworks.bridge.models.accounts.UserSession; import org.sagebionetworks.bridge.models.studies.StudyIdentifier; import org.sagebionetworks.bridge.models.studies.StudyIdentifierImpl; import org.sagebionetworks.bridge.models.upload.UploadSchema; import org.sagebionetworks.bridge.services.UploadSchemaService; public class UploadSchemaControllerTest { private static final String TEST_SCHEMA_ID = "controller-test-schema"; private static final String TEST_SCHEMA_JSON = "{\n" + " \"name\":\"Controller Test Schema\",\n" + " \"revision\":3,\n" + " \"schemaId\":\"controller-test-schema\",\n" + " \"schemaType\":\"ios_data\",\n" + " \"fieldDefinitions\":[\n" + " {\n" + " \"name\":\"field-name\",\n" + " \"required\":true,\n" + " \"type\":\"STRING\"\n" + " }\n" + " ]\n" + "}"; @Test public void createV4() throws Exception { // mock service UploadSchemaService mockSvc = mock(UploadSchemaService.class); ArgumentCaptor<UploadSchema> createdSchemaCaptor = 
ArgumentCaptor.forClass(UploadSchema.class); when(mockSvc.createSchemaRevisionV4(eq(TestConstants.TEST_STUDY), createdSchemaCaptor.capture())).thenReturn( makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.createSchemaRevisionV4(); assertEquals(201, result.status()); assertSchemaInResult(result); assertSchemaInArgCaptor(createdSchemaCaptor); } @Test public void createSchema() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); ArgumentCaptor<UploadSchema> createdSchemaArgCaptor = ArgumentCaptor.forClass(UploadSchema.class); when(mockSvc.createOrUpdateUploadSchema(eq(TestConstants.TEST_STUDY), createdSchemaArgCaptor.capture())) .thenReturn(makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.createOrUpdateUploadSchema(); assertEquals(200, result.status()); assertSchemaInResult(result); assertSchemaInArgCaptor(createdSchemaArgCaptor); } @Test public void deleteSchemaById() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.deleteAllRevisionsOfUploadSchema(TestConstants.TEST_STUDY_IDENTIFIER, "delete-schema"); assertEquals(200, result.status()); verify(mockSvc).deleteUploadSchemaById(TestConstants.TEST_STUDY, "delete-schema"); } @Test public void getSchemaById() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); when(mockSvc.getUploadSchema(TestConstants.TEST_STUDY, TEST_SCHEMA_ID)).thenReturn( makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = 
controller.getUploadSchema(TEST_SCHEMA_ID); assertEquals(200, result.status()); assertSchemaInResult(result); } @Test public void getSchemaByIdAndRev() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); when(mockSvc.getUploadSchemaByIdAndRev(TestConstants.TEST_STUDY, TEST_SCHEMA_ID, 1)).thenReturn( makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.getUploadSchemaByIdAndRev(TEST_SCHEMA_ID, 1); assertEquals(200, result.status()); assertSchemaInResult(result); } @Test public void getByStudyAndSchemaAndRev() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); when(mockSvc.getUploadSchemaByIdAndRev(TestConstants.TEST_STUDY, TEST_SCHEMA_ID, 1)).thenReturn( makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.getUploadSchemaByStudyAndSchemaAndRev(TestConstants.TEST_STUDY_IDENTIFIER, TEST_SCHEMA_ID, 1); assertEquals(200, result.status()); // Unlike the other methods, this also returns study ID String resultJson = Helpers.contentAsString(result); UploadSchema resultSchema = BridgeObjectMapper.get().readValue(resultJson, UploadSchema.class); assertEquals(TEST_SCHEMA_ID, resultSchema.getSchemaId()); assertEquals(TestConstants.TEST_STUDY_IDENTIFIER, resultSchema.getStudyId()); } @Test public void getSchemasForStudy() throws Exception { // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); when(mockSvc.getUploadSchemasForStudy(TestConstants.TEST_STUDY)).thenReturn(ImmutableList.of( makeUploadSchemaForOutput())); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.getUploadSchemasForStudy(); assertEquals(200, result.status()); 
String resultJson = Helpers.contentAsString(result); JsonNode resultNode = BridgeObjectMapper.get().readTree(resultJson); assertEquals("ResourceList", resultNode.get("type").textValue()); assertEquals(1, resultNode.get("total").intValue()); JsonNode itemListNode = resultNode.get("items"); assertEquals(1, itemListNode.size()); UploadSchema resultSchema = BridgeObjectMapper.get().treeToValue(itemListNode.get(0), UploadSchema.class); assertEquals(TEST_SCHEMA_ID, resultSchema.getSchemaId()); assertNull(resultSchema.getStudyId()); } @Test public void getAllRevisionsOfASchema() throws Exception { String schemaId = "controller-test-schema"; // Create a couple of revisions UploadSchema schema1 = makeUploadSchemaForOutput(1); UploadSchema schema2 = makeUploadSchemaForOutput(2); UploadSchema schema3 = makeUploadSchemaForOutput(3); // mock UploadSchemaService UploadSchemaService mockSvc = mock(UploadSchemaService.class); when(mockSvc.getUploadSchemaAllRevisions(TestConstants.TEST_STUDY, schemaId)).thenReturn(ImmutableList.of( schema3, schema2, schema1)); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.getUploadSchemaAllRevisions(schemaId); assertEquals(200, result.status()); String resultJson = Helpers.contentAsString(result); JsonNode resultNode = BridgeObjectMapper.get().readTree(resultJson); assertEquals("ResourceList", resultNode.get("type").textValue()); assertEquals(3, resultNode.get("total").intValue()); JsonNode itemsNode = resultNode.get("items"); assertEquals(3, itemsNode.size()); // Schemas are returned in reverse order. 
UploadSchema returnedSchema3 = BridgeObjectMapper.get().treeToValue(itemsNode.get(0), UploadSchema.class); assertEquals(3, returnedSchema3.getRevision()); assertEquals(TEST_SCHEMA_ID, returnedSchema3.getSchemaId()); assertNull(returnedSchema3.getStudyId()); UploadSchema returnedSchema2 = BridgeObjectMapper.get().treeToValue(itemsNode.get(1), UploadSchema.class); assertEquals(2, returnedSchema2.getRevision()); assertEquals(TEST_SCHEMA_ID, returnedSchema2.getSchemaId()); assertNull(returnedSchema2.getStudyId()); UploadSchema returnedSchema1 = BridgeObjectMapper.get().treeToValue(itemsNode.get(2), UploadSchema.class); assertEquals(1, returnedSchema1.getRevision()); assertEquals(TEST_SCHEMA_ID, returnedSchema1.getSchemaId()); assertNull(returnedSchema1.getStudyId()); } @Test public void updateV4() throws Exception { // mock service UploadSchemaService mockSvc = mock(UploadSchemaService.class); ArgumentCaptor<UploadSchema> updatedSchemaCaptor = ArgumentCaptor.forClass(UploadSchema.class); when(mockSvc.updateSchemaRevisionV4(eq(TestConstants.TEST_STUDY), eq(TEST_SCHEMA_ID), eq(1), updatedSchemaCaptor.capture())).thenReturn(makeUploadSchemaForOutput()); // setup, execute, and validate UploadSchemaController controller = setupControllerWithService(mockSvc); Result result = controller.updateSchemaRevisionV4(TEST_SCHEMA_ID, 1); assertEquals(200, result.status()); assertSchemaInResult(result); assertSchemaInArgCaptor(updatedSchemaCaptor); } @Test public void invalidSchemaThrowsCompleteValidationException() throws Exception { // mock session StudyIdentifier studyIdentifier = new StudyIdentifierImpl("create-schema-study"); UserSession mockSession = new UserSession(); mockSession.setStudyIdentifier(studyIdentifier); // mock request JSON; this is pretty bad JSON. We want an error message back // that should practically tell the caller how to construct this object. 
TestUtils.mockPlayContextWithJson("{\"fieldDefinitions\":[{\"name\":\"foo\"}]}"); // spy controller UploadSchemaController controller = spy(new UploadSchemaController()); // We need the real service because it throws the InvalidEntityException we're testing here. controller.setUploadSchemaService(new UploadSchemaService()); doReturn(mockSession).when(controller).getAuthenticatedSession(any(Roles.class)); // execute and validate try { controller.createOrUpdateUploadSchema(); } catch(InvalidEntityException e) { assertEquals("schemaId is required", e.getErrors().get("schemaId").get(0)); assertEquals("name is required", e.getErrors().get("name").get(0)); assertEquals("schemaType is required", e.getErrors().get("schemaType").get(0)); assertEquals("fieldDefinitions[0].type is required", e.getErrors().get("fieldDefinitions[0].type").get(0)); } } private static UploadSchemaController setupControllerWithService(UploadSchemaService svc) throws Exception { // mock session UserSession mockSession = new UserSession(); mockSession.setStudyIdentifier(TestConstants.TEST_STUDY); // mock request JSON TestUtils.mockPlayContextWithJson(TEST_SCHEMA_JSON); // spy controller UploadSchemaController controller = spy(new UploadSchemaController()); controller.setUploadSchemaService(svc); doReturn(mockSession).when(controller).getAuthenticatedSession(anyVararg()); return controller; } private static UploadSchema makeUploadSchemaForOutput() throws Exception { return makeUploadSchemaForOutput(3); } private static UploadSchema makeUploadSchemaForOutput(int revision) throws Exception { ObjectNode node = (ObjectNode)BridgeObjectMapper.get().readTree(TEST_SCHEMA_JSON); node.put("revision", revision); // Server returns schemas with study IDs (which are filtered out selectively in some methods). 
node.put("studyId", TestConstants.TEST_STUDY_IDENTIFIER); return BridgeObjectMapper.get().convertValue(node, UploadSchema.class); } private static void assertSchemaInResult(Result result) throws Exception { // JSON validation is already tested, so just check obvious things like schema ID // Also, (most) method results don't include study ID String jsonText = Helpers.contentAsString(result); UploadSchema schema = BridgeObjectMapper.get().readValue(jsonText, UploadSchema.class); assertEquals(TEST_SCHEMA_ID, schema.getSchemaId()); assertNull(schema.getStudyId()); } private static void assertSchemaInArgCaptor(ArgumentCaptor<UploadSchema> argCaptor) { // Similarly, just check schema ID UploadSchema arg = argCaptor.getValue(); assertEquals(TEST_SCHEMA_ID, arg.getSchemaId()); } }
Fix for test. Just changing the test to match the behavior (which is correct).
test/org/sagebionetworks/bridge/play/controllers/UploadSchemaControllerTest.java
Fix for test. Just changing the test to match the behavior (which is correct).
<ide><path>est/org/sagebionetworks/bridge/play/controllers/UploadSchemaControllerTest.java <ide> <ide> import static org.junit.Assert.assertEquals; <ide> import static org.junit.Assert.assertNull; <add>import static org.junit.Assert.fail; <ide> import static org.mockito.Mockito.any; <ide> import static org.mockito.Mockito.anyVararg; <ide> import static org.mockito.Mockito.doReturn; <ide> import org.sagebionetworks.bridge.Roles; <ide> import org.sagebionetworks.bridge.TestConstants; <ide> import org.sagebionetworks.bridge.TestUtils; <add>import org.sagebionetworks.bridge.dao.UploadSchemaDao; <ide> import org.sagebionetworks.bridge.exceptions.InvalidEntityException; <ide> import org.sagebionetworks.bridge.json.BridgeObjectMapper; <ide> import org.sagebionetworks.bridge.models.accounts.UserSession; <ide> <ide> // mock request JSON; this is pretty bad JSON. We want an error message back <ide> // that should practically tell the caller how to construct this object. <del> TestUtils.mockPlayContextWithJson("{\"fieldDefinitions\":[{\"name\":\"foo\"}]}"); <add> String json = "{\"schemaId\":\"schemaId\",\"fieldDefinitions\":[{\"name\":\"foo\"}]}"; <add> TestUtils.mockPlayContextWithJson(json); <add> <add> UploadSchema schema = BridgeObjectMapper.get().readValue(json, UploadSchema.class); <ide> <ide> // spy controller <ide> UploadSchemaController controller = spy(new UploadSchemaController()); <ide> // We need the real service because it throws the InvalidEntityException we're testing here. 
<del> controller.setUploadSchemaService(new UploadSchemaService()); <add> <add> UploadSchemaDao uploadSchemaDao = mock(UploadSchemaDao.class); <add> doReturn(schema).when(uploadSchemaDao).getUploadSchemaLatestRevisionById(studyIdentifier, "schemaId"); <add> UploadSchemaService uploadSchemaService = new UploadSchemaService(); <add> uploadSchemaService.setUploadSchemaDao(uploadSchemaDao); <add> <add> controller.setUploadSchemaService(uploadSchemaService); <ide> doReturn(mockSession).when(controller).getAuthenticatedSession(any(Roles.class)); <ide> <ide> // execute and validate <ide> try { <ide> controller.createOrUpdateUploadSchema(); <add> fail("Should have thrown exception"); <ide> } catch(InvalidEntityException e) { <del> assertEquals("schemaId is required", e.getErrors().get("schemaId").get(0)); <ide> assertEquals("name is required", e.getErrors().get("name").get(0)); <ide> assertEquals("schemaType is required", e.getErrors().get("schemaType").get(0)); <ide> assertEquals("fieldDefinitions[0].type is required", e.getErrors().get("fieldDefinitions[0].type").get(0)); <ide> } <ide> } <del> <add> <ide> private static UploadSchemaController setupControllerWithService(UploadSchemaService svc) throws Exception { <ide> // mock session <ide> UserSession mockSession = new UserSession();
JavaScript
mit
76c8906f50d0a3deb550e5f45cc64cb39913d466
0
yourpalal/backtalk,yourpalal/backtalk
'use strict'; var argparser = require('argparser') .nonvals("ast") .parse(); var BT = require('./back_talker'); var readline = require('readline'); var rl = readline.createInterface({ input: process.stdin, output: process.stdout }); var scope = new BT.Scope(), context = new BT.Context(), evaluator = new BT.Evaluator(scope, context), running = true ; context.addFunc({ patterns: ['q'], impl: function() { running = false; return 'goodbye!';} }); function loop() { rl.question('$>: ', function(answer) { try { var ast = BT.parse(answer); if (argparser.opt("ast")) { console.log(ast, typeof ast); } console.log(evaluator.eval(ast)); } catch (e) { if (e instanceof BT.AST.ParseError) { console.log(e); } else { throw e; } } if (!running) { rl.close(); process.exit(); } loop(); }); }; loop();
repl.js
'use strict'; var argparser = require('argparser') .nonvals("ast") .parse(); var BT = require('./back_talker'); var readline = require('readline'); var rl = readline.createInterface({ input: process.stdin, output: process.stdout }); var scope = new BT.Scope(), evaluator = new BT.Evaluator(scope) ; function loop() { rl.question('$>: ', function(answer) { if (answer == 'q') { rl.close(); process.exit(); } try { var ast = BT.parse(answer); if (argparser.opt("ast")) { console.log(ast, typeof ast); } console.log(evaluator.eval(ast)); } catch (e) { if (e instanceof BT.AST.ParseError) { console.log(e); } else { throw e; } } loop(); }); }; loop();
Replace 'q' hack in repl with a function!
repl.js
Replace 'q' hack in repl with a function!
<ide><path>epl.js <ide> <ide> <ide> var scope = new BT.Scope(), <del> evaluator = new BT.Evaluator(scope) <add> context = new BT.Context(), <add> evaluator = new BT.Evaluator(scope, context), <add> running = true <ide> ; <add> <add>context.addFunc({ <add> patterns: ['q'], <add> impl: function() { running = false; return 'goodbye!';} <add>}); <add> <ide> <ide> function loop() { <ide> rl.question('$>: ', function(answer) { <del> if (answer == 'q') { <del> rl.close(); <del> process.exit(); <del> } <ide> <ide> try { <ide> var ast = BT.parse(answer); <ide> throw e; <ide> } <ide> } <add> if (!running) { <add> rl.close(); <add> process.exit(); <add> } <ide> loop(); <ide> }); <ide> };
Java
mit
0c56f21a5a9c52a8b3dc7e89b8c5ef0f98b65209
0
hpe-idol/java-iod-client,hpautonomy/java-hod-client,hpautonomy/java-iod-client,hpautonomy/java-hod-client,hpe-idol/java-iod-client,hpe-idol/java-hod-client,hpautonomy/java-iod-client,hpe-idol/java-hod-client,hpautonomy/java-hod-client,hpe-idol/java-hod-client
/* * Copyright 2015 Hewlett-Packard Development Company, L.P. * Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. */ package com.hp.autonomy.iod.client.search; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import com.hp.autonomy.iod.client.converter.DoNotConvert; import com.hp.autonomy.iod.client.util.MultiMap; import lombok.Setter; import lombok.experimental.Accessors; import org.apache.commons.lang.StringUtils; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; /** * Helper class for building up optional parameters for the Query Text Index API. The default value for all parameters * is null. Null parameters will not be sent to IDOL OnDemand */ @Setter @Accessors(chain = true) public class QueryTextIndexRequestBuilder { private static final DateTimeFormatter DATE_FORMAT = DateTimeFormat.forPattern("HH:mm:ss dd/MM/yyyy G"); /** * @param maxDate A DateTime to use as the value for the max_date parameter. This parameter takes precedence over * maxDateDays and maxDateSeconds */ private DateTime maxDate; /** * @param minDate A DateTime to use as the value for the min_date parameter. This parameter takes precedence over * minDateDays and minDateSeconds */ private DateTime minDate; /** * @param maxDateDays A number of days to use as the value for the max_date parameter. This parameter takes * precedence over maxDateSeconds */ private Long maxDateDays; /** * @param maxDateSeconds A number of seconds to use as the value for the max_date parameter. */ private Long maxDateSeconds; /** * @param minDateDays A number of days to use as the value for the min_date parameter. This parameter takes * precedence over maxDateSeconds */ private Long minDateDays; /** * @param minDateSeconds A number of seconds to use as the value for the min_date parameter. 
*/ private Long minDateSeconds; /** * @param endTag Value for the end_tag parameter */ private String endTag; /** * @param fieldText Value for the field_text parameter */ private String fieldText; /** * @param highlight Value for the highlight parameter */ private Highlight highlight; /** * @param absoluteMaxResults Value for the absolute_max_results parameter */ private Integer absoluteMaxResults; /** * @param maxPageResults Value for the max_page_results parameter */ private Integer maxPageResults; /** * @param minScore Value for the min_score parameter */ private Integer minScore; /** * @param print Value for the print parameter */ private Print print; /** * @param printFields Value for the print_fields parameter. This list will be joined with commas before being sent * to the server */ private List<String> printFields; /** * @param start Value for the start parameter */ private Integer start; /** * @param sort Value for the sort parameter */ private Sort sort; /** * @param startTag Value for the start_tag parameter */ private String startTag; /** * @param summary Value for the summary parameter */ private Summary summary; /** * @param totalResults Value for the total_results parameter */ private Boolean totalResults; private List<String> indexes = new ArrayList<>(); /** * Sets the value of the indexes parameter * @param index0 The first index * @param indexes The remaining indexes * @return this */ public QueryTextIndexRequestBuilder addIndexes(final String index0, final String... indexes) { this.indexes.add(index0); this.indexes.addAll(Arrays.asList(indexes)); return this; } /** * Sets the value of the indexes parameter * @param indexes The indexes to query * @return this */ public QueryTextIndexRequestBuilder setIndexes(final List<String> indexes) { this.indexes = indexes; return this; } /** * @return A map of query parameters suitable for use with {@link QueryTextIndexService}. 
get is NOT supported on * the resulting map */ public Map<String, Object> build() { final Map<String, Object> map = new MultiMap<>(); map.put("end_tag", endTag); map.put("field_text", fieldText); map.put("highlight", highlight); map.put("absolute_max_results", absoluteMaxResults); map.put("max_page_results", maxPageResults); map.put("min_score", minScore); map.put("print", print); map.put("print_fields", StringUtils.join(printFields, ',')); map.put("sort", sort); map.put("start", start); map.put("start_tag", startTag); map.put("summary", summary); map.put("total_results", totalResults); // prefer the DateTime over the numeric versions if(minDate != null) { map.put("min_date", DATE_FORMAT.print(minDate)); } else if(minDateDays != null) { map.put("min_date", minDateDays); } else if(maxDateSeconds != null) { map.put("min_date", minDateSeconds + "s"); } if(maxDate != null) { map.put("max_date", DATE_FORMAT.print(maxDate)); } else if(maxDateDays != null) { map.put("max_date", maxDateDays); } else if(maxDateSeconds != null) { map.put("max_date", maxDateSeconds + "s"); } for(final String index : indexes) { map.put("indexes", index); } return map; } /** * Enum type representing the possible options for the print parameter */ @DoNotConvert public enum Print { all, all_sections, date, fields, none, no_results, parametric, reference } /** * Enum type representing the possible options for the highlight parameter */ @DoNotConvert public enum Highlight { off, terms, sentences } /** * Enum type representing the possible options for the sort parameter */ @DoNotConvert public enum Sort { autn_rank, date, off, relevance, reverse_date, reverse_relevance } /** * Enum type representing the possible options for the summary parameter */ @DoNotConvert public enum Summary { context, concept, quick, off } }
src/main/java/com/hp/autonomy/iod/client/search/QueryTextIndexRequestBuilder.java
/* * Copyright 2015 Hewlett-Packard Development Company, L.P. * Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. */ package com.hp.autonomy.iod.client.search; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import com.hp.autonomy.iod.client.converter.DoNotConvert; import com.hp.autonomy.iod.client.util.MultiMap; import lombok.Setter; import lombok.experimental.Accessors; import org.apache.commons.lang.StringUtils; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; /** * Helper class for building up optional parameters for the Query Text Index API. The default value for all parameters * is null. Null parameters will not be sent to IDOL OnDemand */ @Setter @Accessors(chain = true) public class QueryTextIndexRequestBuilder { private static final DateTimeFormatter DATE_FORMAT = DateTimeFormat.forPattern("HH:mm:ss dd/MM/yyyy G"); /** * @param maxDate A DateTime to use as the value for the max_date parameter. This parameter takes precedence over * maxDateDays and maxDateSeconds */ private DateTime maxDate; /** * @param minDate A DateTime to use as the value for the min_date parameter. This parameter takes precedence over * minDateDays and minDateSeconds */ private DateTime minDate; /** * @param maxDateDays A number of days to use as the value for the max_date parameter. This parameter takes * precedence over maxDateSeconds */ private Long maxDateDays; /** * @param maxDateSeconds A number of seconds to use as the value for the max_date parameter. */ private Long maxDateSeconds; /** * @param minDateDays A number of days to use as the value for the min_date parameter. This parameter takes * precedence over maxDateSeconds */ private Long minDateDays; /** * @param minDateSeconds A number of seconds to use as the value for the min_date parameter. 
*/ private Long minDateSeconds; /** * @param endTag Value for the end_tag parameter */ private String endTag; /** * @param fieldText Value for the field_text parameter */ private String fieldText; /** * @param highlight Value for the highlight parameter */ private Highlight highlight; /** * @param absoluteMaxResults Value for the absolute_max_results parameter */ private Integer absoluteMaxResults; /** * @param maxPageResults Value for the max_page_results parameter */ private Integer maxPageResults; /** * @param minScore Value for the min_score parameter */ private Integer minScore; /** * @param print Value for the print parameter */ private Print print; /** * @param printFields Value for the print_fields parameter. This list will be joined with commas before being sent * to the server */ private List<String> printFields; /** * @param start Value for the start parameter */ private Integer start; /** * @param sort Value for the sort parameter */ private Sort sort; /** * @param startTag Value for the start_tag parameter */ private String startTag; /** * @param summary Value for the summary parameter */ private Summary summary; /** * @param totalResults Value for the total_results parameter */ private Boolean totalResults; private List<String> indexes = new ArrayList<>(); /** * Sets the value of the indexes parameter * @param index0 The first index * @param indexes The remaining indexes * @return this */ public QueryTextIndexRequestBuilder setIndexes(final String index0, final String... indexes) { this.indexes.add(index0); this.indexes.addAll(Arrays.asList(indexes)); return this; } /** * Sets the value of the indexes parameter * @param indexes The indexes to query * @return this */ public QueryTextIndexRequestBuilder setIndexes(final List<String> indexes) { this.indexes = indexes; return this; } /** * @return A map of query parameters suitable for use with {@link QueryTextIndexService}. 
get is NOT supported on * the resulting map */ public Map<String, Object> build() { final Map<String, Object> map = new MultiMap<>(); map.put("end_tag", endTag); map.put("field_text", fieldText); map.put("highlight", highlight); map.put("absolute_max_results", absoluteMaxResults); map.put("max_page_results", maxPageResults); map.put("min_score", minScore); map.put("print", print); map.put("print_fields", StringUtils.join(printFields, ',')); map.put("sort", sort); map.put("start", start); map.put("start_tag", startTag); map.put("summary", summary); map.put("total_results", totalResults); // prefer the DateTime over the numeric versions if(minDate != null) { map.put("min_date", DATE_FORMAT.print(minDate)); } else if(minDateDays != null) { map.put("min_date", minDateDays); } else if(maxDateSeconds != null) { map.put("min_date", minDateSeconds + "s"); } if(maxDate != null) { map.put("max_date", DATE_FORMAT.print(maxDate)); } else if(maxDateDays != null) { map.put("max_date", maxDateDays); } else if(maxDateSeconds != null) { map.put("max_date", maxDateSeconds + "s"); } for(final String index : indexes) { map.put("indexes", index); } return map; } /** * Enum type representing the possible options for the print parameter */ @DoNotConvert public enum Print { all, all_sections, date, fields, none, no_results, parametric, reference } /** * Enum type representing the possible options for the highlight parameter */ @DoNotConvert public enum Highlight { off, terms, sentences } /** * Enum type representing the possible options for the sort parameter */ @DoNotConvert public enum Sort { autn_rank, date, off, relevance, reverse_date, reverse_relevance } /** * Enum type representing the possible options for the summary parameter */ @DoNotConvert public enum Summary { context, concept, quick, off } }
Rename method as the old name possibly implies it does something different.
src/main/java/com/hp/autonomy/iod/client/search/QueryTextIndexRequestBuilder.java
Rename method as the old name possibly implies it does something different.
<ide><path>rc/main/java/com/hp/autonomy/iod/client/search/QueryTextIndexRequestBuilder.java <ide> * @param indexes The remaining indexes <ide> * @return this <ide> */ <del> public QueryTextIndexRequestBuilder setIndexes(final String index0, final String... indexes) { <add> public QueryTextIndexRequestBuilder addIndexes(final String index0, final String... indexes) { <ide> this.indexes.add(index0); <ide> this.indexes.addAll(Arrays.asList(indexes)); <ide>
Java
apache-2.0
3d9be813774ac8c8b0ab325402b609d364930a6b
0
KatsuraKKKK/netty,f7753/netty,orika/netty,LuminateWireless/netty,tempbottle/netty,windie/netty,eonezhang/netty,mosoft521/netty,artgon/netty,netty/netty,yipen9/netty,orika/netty,nayato/netty,LuminateWireless/netty,qingsong-xu/netty,brennangaunce/netty,olupotd/netty,zer0se7en/netty,Apache9/netty,luyiisme/netty,mosoft521/netty,lukehutch/netty,drowning/netty,mway08/netty,Apache9/netty,gigold/netty,danny200309/netty,timboudreau/netty,mosoft521/netty,gerdriesselmann/netty,seetharamireddy540/netty,Squarespace/netty,lukw00/netty,wuxiaowei907/netty,idelpivnitskiy/netty,nayato/netty,brennangaunce/netty,exinguu/netty,liyang1025/netty,woshilaiceshide/netty,altihou/netty,bob329/netty,Kingson4Wu/netty,wangyikai/netty,ioanbsu/netty,nadeeshaan/netty,sverkera/netty,timboudreau/netty,kjniemi/netty,moyiguket/netty,yonglehou/netty-1,timboudreau/netty,normanmaurer/netty,slandelle/netty,AnselQiao/netty,doom369/netty,johnou/netty,fengjiachun/netty,Kingson4Wu/netty,louiscryan/netty,zhoffice/netty,s-gheldd/netty,Kalvar/netty,carlbai/netty,serioussam/netty,x1957/netty,maliqq/netty,chanakaudaya/netty,AchinthaReemal/netty,imangry/netty-zh,xiexingguang/netty,BrunoColin/netty,balaprasanna/netty,altihou/netty,wuyinxian124/netty,zzcclp/netty,afds/netty,johnou/netty,zxhfirefox/netty,gerdriesselmann/netty,clebertsuconic/netty,zhoffice/netty,afds/netty,wangyikai/netty,mikkokar/netty,jongyeol/netty,chinayin/netty,castomer/netty,MediumOne/netty,rovarga/netty,Alwayswithme/netty,KatsuraKKKK/netty,kvr000/netty,mikkokar/netty,qingsong-xu/netty,mx657649013/netty,lightsocks/netty,hepin1989/netty,kiril-me/netty,fantayeneh/netty,ninja-/netty,bob329/netty,ejona86/netty,maliqq/netty,shelsonjava/netty,ngocdaothanh/netty,Kalvar/netty,silvaran/netty,lukehutch/netty,tbrooks8/netty,kyle-liu/netty4study,qingsong-xu/netty,luyiisme/netty,blucas/netty,jovezhougang/netty,JungMinu/netty,junjiemars/netty,chinayin/netty,ejona86/netty,zhujingling/netty,huanyi0723/netty,Scottmitch/netty,yawkat/netty,timboudreau/netty,tbrooks8/
netty,sunbeansoft/netty,golovnin/netty,shism/netty,jovezhougang/netty,drowning/netty,mosoft521/netty,s-gheldd/netty,zzcclp/netty,hgl888/netty,caoyanwei/netty,phlizik/netty,kvr000/netty,caoyanwei/netty,lukw00/netty,shism/netty,ifesdjeen/netty,nadeeshaan/netty,shenguoquan/netty,kiril-me/netty,ichaki5748/netty,buchgr/netty,nkhuyu/netty,xiongzheng/netty,hgl888/netty,zhoffice/netty,mcanthony/netty,kiril-me/netty,mway08/netty,DavidAlphaFox/netty,huanyi0723/netty,serioussam/netty,altihou/netty,mubarak/netty,danbev/netty,unei66/netty,daschl/netty,hgl888/netty,alkemist/netty,nmittler/netty,kjniemi/netty,wuyinxian124/netty,castomer/netty,phlizik/netty,mcobrien/netty,AnselQiao/netty,xingguang2013/netty,shenguoquan/netty,Alwayswithme/netty,fenik17/netty,NiteshKant/netty,jongyeol/netty,exinguu/netty,fengshao0907/netty,lznhust/netty,mubarak/netty,codevelop/netty,tempbottle/netty,Techcable/netty,AchinthaReemal/netty,MediumOne/netty,carl-mastrangelo/netty,LuminateWireless/netty,Kalvar/netty,woshilaiceshide/netty,joansmith/netty,lugt/netty,duqiao/netty,Apache9/netty,Spikhalskiy/netty,menacher/netty,eonezhang/netty,cnoldtree/netty,firebase/netty,DolphinZhao/netty,Squarespace/netty,DolphinZhao/netty,huuthang1993/netty,chanakaudaya/netty,bob329/netty,yrcourage/netty,ijuma/netty,lightsocks/netty,yrcourage/netty,jchambers/netty,daschl/netty,mx657649013/netty,youprofit/netty,sja/netty,sunbeansoft/netty,jenskordowski/netty,bryce-anderson/netty,danny200309/netty,castomer/netty,NiteshKant/netty,moyiguket/netty,bryce-anderson/netty,slandelle/netty,djchen/netty,gigold/netty,zhujingling/netty,mikkokar/netty,x1957/netty,danbev/netty,cnoldtree/netty,afredlyj/learn-netty,x1957/netty,bob329/netty,maliqq/netty,sverkera/netty,lightsocks/netty,zxhfirefox/netty,nat2013/netty,djchen/netty,junjiemars/netty,fengjiachun/netty,dongjiaqiang/netty,fantayeneh/netty,serioussam/netty,blademainer/netty,louxiu/netty,jdivy/netty,zzcclp/netty,bigheary/netty,eonezhang/netty,DavidAlphaFox/netty,chinayin/netty,ioanbsu/
netty,wuxiaowei907/netty,SinaTadayon/netty,alkemist/netty,imangry/netty-zh,seetharamireddy540/netty,sja/netty,yrcourage/netty,xiongzheng/netty,lznhust/netty,seetharamireddy540/netty,blucas/netty,xiexingguang/netty,sameira/netty,afds/netty,mx657649013/netty,chrisprobst/netty,skyao/netty,niuxinghua/netty,Scottmitch/netty,mway08/netty,joansmith/netty,skyao/netty,zxhfirefox/netty,cnoldtree/netty,hyangtack/netty,nmittler/netty,x1957/netty,purplefox/netty-4.0.2.8-hacked,golovnin/netty,gerdriesselmann/netty,orika/netty,youprofit/netty,BrunoColin/netty,carl-mastrangelo/netty,DavidAlphaFox/netty,tempbottle/netty,bryce-anderson/netty,x1957/netty,IBYoung/netty,mway08/netty,ijuma/netty,maliqq/netty,WangJunTYTL/netty,nayato/netty,blucas/netty,artgon/netty,youprofit/netty,sverkera/netty,hyangtack/netty,yrcourage/netty,xiexingguang/netty,blademainer/netty,louiscryan/netty,huanyi0723/netty,jdivy/netty,joansmith/netty,ajaysarda/netty,mubarak/netty,caoyanwei/netty,netty/netty,jovezhougang/netty,carlbai/netty,liyang1025/netty,jchambers/netty,louiscryan/netty,carl-mastrangelo/netty,qingsong-xu/netty,lukw00/netty,louxiu/netty,zhujingling/netty,afds/netty,jongyeol/netty,Spikhalskiy/netty,BrunoColin/netty,unei66/netty,yawkat/netty,zzcclp/netty,eincs/netty,rovarga/netty,eonezhang/netty,zer0se7en/netty,dongjiaqiang/netty,mcanthony/netty,balaprasanna/netty,nayato/netty,satishsaley/netty,codevelop/netty,zer0se7en/netty,buchgr/netty,shenguoquan/netty,satishsaley/netty,liuciuse/netty,ichaki5748/netty,ichaki5748/netty,codevelop/netty,ijuma/netty,tbrooks8/netty,WangJunTYTL/netty,lukw00/netty,netty/netty,louxiu/netty,timboudreau/netty,junjiemars/netty,ioanbsu/netty,Squarespace/netty,shelsonjava/netty,johnou/netty,chanakaudaya/netty,smayoorans/netty,eincs/netty,castomer/netty,sameira/netty,liuciuse/netty,silvaran/netty,idelpivnitskiy/netty,gerdriesselmann/netty,satishsaley/netty,junjiemars/netty,normanmaurer/netty,alkemist/netty,afredlyj/learn-netty,netty/netty,rovarga/netty,idelpivnitskiy/netty,je
nskordowski/netty,ninja-/netty,zhoffice/netty,smayoorans/netty,jchambers/netty,balaprasanna/netty,liyang1025/netty,mcanthony/netty,mosoft521/netty,Alwayswithme/netty,carlbai/netty,liuciuse/netty,kjniemi/netty,Alwayswithme/netty,ejona86/netty,wangyikai/netty,joansmith/netty,tbrooks8/netty,Mounika-Chirukuri/netty,doom369/netty,yawkat/netty,develar/netty,BrunoColin/netty,f7753/netty,golovnin/netty,danny200309/netty,mikkokar/netty,DolphinZhao/netty,olupotd/netty,eincs/netty,DavidAlphaFox/netty,nmittler/netty,blademainer/netty,jovezhougang/netty,fenik17/netty,fengshao0907/netty,clebertsuconic/netty,tbrooks8/netty,unei66/netty,carlbai/netty,ajaysarda/netty,lightsocks/netty,chanakaudaya/netty,sammychen105/netty,DolphinZhao/netty,WangJunTYTL/netty,zhujingling/netty,windie/netty,AchinthaReemal/netty,andsel/netty,lugt/netty,djchen/netty,chinayin/netty,nkhuyu/netty,kvr000/netty,kyle-liu/netty4study,xiongzheng/netty,woshilaiceshide/netty,normanmaurer/netty,junjiemars/netty,gigold/netty,sameira/netty,WangJunTYTL/netty,jdivy/netty,jdivy/netty,orika/netty,CodingFabian/netty,JungMinu/netty,niuxinghua/netty,Spikhalskiy/netty,CodingFabian/netty,hepin1989/netty,fenik17/netty,hepin1989/netty,LuminateWireless/netty,windie/netty,xingguang2013/netty,mcobrien/netty,windie/netty,mway08/netty,firebase/netty,xingguang2013/netty,yipen9/netty,ngocdaothanh/netty,bryce-anderson/netty,SinaTadayon/netty,lugt/netty,brennangaunce/netty,purplefox/netty-4.0.2.8-hacked,Spikhalskiy/netty,idelpivnitskiy/netty,s-gheldd/netty,Mounika-Chirukuri/netty,hyangtack/netty,nkhuyu/netty,jongyeol/netty,WangJunTYTL/netty,ninja-/netty,jongyeol/netty,hepin1989/netty,sja/netty,lznhust/netty,bryce-anderson/netty,f7753/netty,chinayin/netty,xingguang2013/netty,skyao/netty,eonezhang/netty,danbev/netty,Techcable/netty,ngocdaothanh/netty,yawkat/netty,ijuma/netty,Kingson4Wu/netty,SinaTadayon/netty,exinguu/netty,jchambers/netty,IBYoung/netty,imangry/netty-zh,golovnin/netty,chanakaudaya/netty,Kingson4Wu/netty,gigold/netty,silvara
n/netty,shenguoquan/netty,louxiu/netty,ijuma/netty,luyiisme/netty,ajaysarda/netty,lukehutch/netty,ninja-/netty,lukehutch/netty,phlizik/netty,Techcable/netty,nat2013/netty,f7753/netty,niuxinghua/netty,duqiao/netty,djchen/netty,doom369/netty,menacher/netty,mx657649013/netty,danbev/netty,liyang1025/netty,slandelle/netty,silvaran/netty,sameira/netty,Kalvar/netty,caoyanwei/netty,bigheary/netty,wangyikai/netty,SinaTadayon/netty,caoyanwei/netty,sverkera/netty,huuthang1993/netty,nkhuyu/netty,niuxinghua/netty,danny200309/netty,CodingFabian/netty,gigold/netty,Techcable/netty,andsel/netty,develar/netty,pengzj/netty,LuminateWireless/netty,ninja-/netty,chrisprobst/netty,ifesdjeen/netty,andsel/netty,smayoorans/netty,kvr000/netty,kjniemi/netty,unei66/netty,yawkat/netty,Mounika-Chirukuri/netty,johnou/netty,ngocdaothanh/netty,eincs/netty,sverkera/netty,pengzj/netty,blucas/netty,yipen9/netty,huuthang1993/netty,drowning/netty,wuxiaowei907/netty,zhujingling/netty,zzcclp/netty,kiril-me/netty,yonglehou/netty-1,shism/netty,AchinthaReemal/netty,fantayeneh/netty,firebase/netty,moyiguket/netty,AnselQiao/netty,Squarespace/netty,shuangqiuan/netty,blucas/netty,brennangaunce/netty,jenskordowski/netty,serioussam/netty,andsel/netty,idelpivnitskiy/netty,sja/netty,alkemist/netty,sammychen105/netty,bob329/netty,exinguu/netty,MediumOne/netty,mikkokar/netty,mcobrien/netty,shelsonjava/netty,smayoorans/netty,mcanthony/netty,jenskordowski/netty,kjniemi/netty,shenguoquan/netty,woshilaiceshide/netty,golovnin/netty,mubarak/netty,brennangaunce/netty,castomer/netty,s-gheldd/netty,kvr000/netty,lukw00/netty,afds/netty,duqiao/netty,Squarespace/netty,woshilaiceshide/netty,MediumOne/netty,youprofit/netty,balaprasanna/netty,ichaki5748/netty,NiteshKant/netty,mubarak/netty,wuxiaowei907/netty,pengzj/netty,ejona86/netty,olupotd/netty,louiscryan/netty,eincs/netty,zer0se7en/netty,CodingFabian/netty,shelsonjava/netty,NiteshKant/netty,yonglehou/netty-1,unei66/netty,zxhfirefox/netty,dongjiaqiang/netty,purplefox/netty-4.0.2.8
-hacked,fengjiachun/netty,Kingson4Wu/netty,silvaran/netty,fantayeneh/netty,liyang1025/netty,sunbeansoft/netty,danny200309/netty,IBYoung/netty,daschl/netty,moyiguket/netty,djchen/netty,sunbeansoft/netty,shuangqiuan/netty,Scottmitch/netty,zer0se7en/netty,fenik17/netty,clebertsuconic/netty,ichaki5748/netty,BrunoColin/netty,doom369/netty,seetharamireddy540/netty,nadeeshaan/netty,duqiao/netty,Apache9/netty,shuangqiuan/netty,nayato/netty,AnselQiao/netty,nkhuyu/netty,KatsuraKKKK/netty,smayoorans/netty,qingsong-xu/netty,shelsonjava/netty,clebertsuconic/netty,tempbottle/netty,jovezhougang/netty,olupotd/netty,DolphinZhao/netty,joansmith/netty,Techcable/netty,NiteshKant/netty,Kalvar/netty,bigheary/netty,blademainer/netty,yrcourage/netty,maliqq/netty,seetharamireddy540/netty,lukehutch/netty,KatsuraKKKK/netty,dongjiaqiang/netty,JungMinu/netty,Scottmitch/netty,phlizik/netty,fengjiachun/netty,AchinthaReemal/netty,skyao/netty,wuyinxian124/netty,wuyinxian124/netty,fenik17/netty,xiongzheng/netty,wangyikai/netty,lznhust/netty,slandelle/netty,lugt/netty,xiexingguang/netty,hyangtack/netty,rovarga/netty,duqiao/netty,louiscryan/netty,f7753/netty,purplefox/netty-4.0.2.8-hacked,hgl888/netty,netty/netty,jchambers/netty,lightsocks/netty,lznhust/netty,skyao/netty,chrisprobst/netty,blademainer/netty,chrisprobst/netty,normanmaurer/netty,satishsaley/netty,mcobrien/netty,johnou/netty,normanmaurer/netty,CodingFabian/netty,danbev/netty,huanyi0723/netty,firebase/netty,artgon/netty,mx657649013/netty,lugt/netty,chrisprobst/netty,imangry/netty-zh,JungMinu/netty,SinaTadayon/netty,buchgr/netty,Mounika-Chirukuri/netty,orika/netty,niuxinghua/netty,Apache9/netty,yipen9/netty,Mounika-Chirukuri/netty,MediumOne/netty,Scottmitch/netty,huanyi0723/netty,satishsaley/netty,huuthang1993/netty,ejona86/netty,IBYoung/netty,cnoldtree/netty,Spikhalskiy/netty,doom369/netty,windie/netty,afredlyj/learn-netty,carlbai/netty,buchgr/netty,kiril-me/netty,carl-mastrangelo/netty,xiongzheng/netty,ioanbsu/netty,Alwayswithme/netty,luy
iisme/netty,artgon/netty,exinguu/netty,s-gheldd/netty,ajaysarda/netty,nat2013/netty,wuxiaowei907/netty,shism/netty,sja/netty,alkemist/netty,fengshao0907/netty,fengjiachun/netty,xiexingguang/netty,nadeeshaan/netty,sunbeansoft/netty,yonglehou/netty-1,zxhfirefox/netty,yonglehou/netty-1,bigheary/netty,youprofit/netty,louxiu/netty,jenskordowski/netty,AnselQiao/netty,ajaysarda/netty,bigheary/netty,olupotd/netty,nadeeshaan/netty,mcobrien/netty,pengzj/netty,xingguang2013/netty,moyiguket/netty,KatsuraKKKK/netty,imangry/netty-zh,sammychen105/netty,luyiisme/netty,hgl888/netty,shism/netty,liuciuse/netty,clebertsuconic/netty,sameira/netty,mcanthony/netty,artgon/netty,huuthang1993/netty,drowning/netty,zhoffice/netty,codevelop/netty,shuangqiuan/netty,ioanbsu/netty,shuangqiuan/netty,ngocdaothanh/netty,serioussam/netty,IBYoung/netty,balaprasanna/netty,carl-mastrangelo/netty,liuciuse/netty,dongjiaqiang/netty,andsel/netty,jdivy/netty,altihou/netty,tempbottle/netty,fantayeneh/netty,cnoldtree/netty,gerdriesselmann/netty,altihou/netty
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.buffer; import static io.netty.buffer.Unpooled.*; import java.io.IOException; import static org.junit.Assert.*; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.List; import org.junit.Test; /** * An abstract test class for composite channel buffers */ public abstract class AbstractCompositeChannelBufferTest extends AbstractChannelBufferTest { private final ByteOrder order; protected AbstractCompositeChannelBufferTest(ByteOrder order) { if (order == null) { throw new NullPointerException("order"); } this.order = order; } private List<ByteBuf> buffers; private ByteBuf buffer; @Override protected ByteBuf newBuffer(int length) { buffers = new ArrayList<ByteBuf>(); for (int i = 0; i < length; i += 10) { buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[1]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[2]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[3]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[4]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[5]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[6]).order(order)); 
buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[7]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[8]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[9]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); } buffer = Unpooled.wrappedBuffer(buffers.toArray(new ByteBuf[buffers.size()])); buffer.writerIndex(length); buffer = Unpooled.wrappedBuffer(buffer); assertEquals(length, buffer.capacity()); assertEquals(length, buffer.readableBytes()); assertFalse(buffer.writable()); buffer.writerIndex(0); return buffer; } @Override protected ByteBuf[] components() { return buffers.toArray(new ByteBuf[buffers.size()]); } // Composite buffer does not waste bandwidth on discardReadBytes, but // the test will fail in strict mode. @Override protected boolean discardReadBytesDoesNotMoveWritableBytes() { return false; } /** * Tests the "getBufferFor" method */ @Test public void testGetBufferFor() throws IOException { CompositeByteBuf buf = (CompositeByteBuf) Unpooled.wrappedBuffer(new byte[] { 1, 2, 3, 4, 5 }, new byte[] {4, 5, 6, 7, 8, 9, 26}); //Ensure that a random place will be fine assertEquals(buf.getBufferFor(2).capacity(), 5); //Loop through each byte byte index = 0; while (index < buf.capacity()) { assertNotNull(buf.getBufferFor(index++)); } } @Test public void testDiscardReadBytes3() { ByteBuf a, b; a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer( wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, 0, 5).order(order), wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, 5, 5).order(order)); a.skipBytes(6); a.markReaderIndex(); b.skipBytes(6); b.markReaderIndex(); assertEquals(a.readerIndex(), b.readerIndex()); a.readerIndex(a.readerIndex() - 1); b.readerIndex(b.readerIndex() - 1); assertEquals(a.readerIndex(), b.readerIndex()); a.writerIndex(a.writerIndex() - 1); a.markWriterIndex(); 
b.writerIndex(b.writerIndex() - 1); b.markWriterIndex(); assertEquals(a.writerIndex(), b.writerIndex()); a.writerIndex(a.writerIndex() + 1); b.writerIndex(b.writerIndex() + 1); assertEquals(a.writerIndex(), b.writerIndex()); assertTrue(ByteBufUtil.equals(a, b)); // now discard a.discardReadBytes(); b.discardReadBytes(); assertEquals(a.readerIndex(), b.readerIndex()); assertEquals(a.writerIndex(), b.writerIndex()); assertTrue(ByteBufUtil.equals(a, b)); a.resetReaderIndex(); b.resetReaderIndex(); assertEquals(a.readerIndex(), b.readerIndex()); a.resetWriterIndex(); b.resetWriterIndex(); assertEquals(a.writerIndex(), b.writerIndex()); assertTrue(ByteBufUtil.equals(a, b)); } @Test public void testCompositeWrappedBuffer() { ByteBuf header = dynamicBuffer(12).order(order); ByteBuf payload = dynamicBuffer(512).order(order); header.writeBytes(new byte[12]); payload.writeBytes(new byte[512]); ByteBuf buffer = wrappedBuffer(header, payload); assertTrue(header.readableBytes() == 12); assertTrue(payload.readableBytes() == 512); assertEquals(12 + 512, buffer.readableBytes()); assertFalse(buffer.hasNioBuffer()); } @Test public void testSeveralBuffersEquals() { ByteBuf a, b; //XXX Same tests with several buffers in wrappedCheckedBuffer // Different length. a = wrappedBuffer(new byte[] { 1 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1 }).order(order), wrappedBuffer(new byte[] { 2 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Same content, same firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1 }).order(order), wrappedBuffer(new byte[] { 2 }).order(order), wrappedBuffer(new byte[] { 3 }).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Same content, different firstIndex, short length. 
a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 1, 2).order(order), wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 3, 1).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Different content, same firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2 }).order(order), wrappedBuffer(new byte[] { 4 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Different content, different firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 1, 2).order(order), wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 3, 1).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Same content, same firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order), wrappedBuffer(new byte[] { 4, 5, 6 }).order(order), wrappedBuffer(new byte[] { 7, 8, 9, 10 }).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Same content, different firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 1, 5).order(order), wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 6, 5).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Different content, same firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3, 4, 6 }).order(order), wrappedBuffer(new byte[] { 7, 8, 5, 9, 10 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Different content, different firstIndex, long length. 
a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 1, 5).order(order), wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 6, 5).order(order)); assertFalse(ByteBufUtil.equals(a, b)); } @Test public void testWrappedBuffer() { assertEquals(16, wrappedBuffer(wrappedBuffer(ByteBuffer.allocateDirect(16))).capacity()); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer(wrappedBuffer(new byte[][] { new byte[] { 1, 2, 3 } }).order(order))); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer(wrappedBuffer( new byte[] { 1 }, new byte[] { 2 }, new byte[] { 3 }).order(order))); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer(new ByteBuf[] { wrappedBuffer(new byte[] { 1, 2, 3 }).order(order) })); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer( wrappedBuffer(new byte[] { 1 }).order(order), wrappedBuffer(new byte[] { 2 }).order(order), wrappedBuffer(new byte[] { 3 }).order(order))); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer(wrappedBuffer(new ByteBuffer[] { ByteBuffer.wrap(new byte[] { 1, 2, 3 }) }))); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer(wrappedBuffer( ByteBuffer.wrap(new byte[] { 1 }), ByteBuffer.wrap(new byte[] { 2 }), ByteBuffer.wrap(new byte[] { 3 })))); } @Test public void testWrittenBuffersEquals() { //XXX Same tests than testEquals with written AggregateChannelBuffers ByteBuf a, b; // Different length. 
a = wrappedBuffer(new byte[] { 1 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1 }, new byte[1]).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 1); b.writeBytes( wrappedBuffer(new byte[] { 2 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Same content, same firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1 }, new byte[2]).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 2); b.writeBytes( wrappedBuffer(new byte[] { 2 }).order(order)); b.writeBytes(wrappedBuffer(new byte[] { 3 }).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Same content, different firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 1, 3).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 1); b.writeBytes( wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 3, 1).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Different content, same firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2 }, new byte[1]).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 1); b.writeBytes( wrappedBuffer(new byte[] { 4 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Different content, different firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 1, 3).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 1); b.writeBytes( wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 3, 1).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Same content, same firstIndex, long length. 
a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }, new byte[7]).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 7); b.writeBytes( wrappedBuffer(new byte[] { 4, 5, 6 }).order(order)); b.writeBytes( wrappedBuffer(new byte[] { 7, 8, 9, 10 }).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Same content, different firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 1, 10).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 5); b.writeBytes( wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 6, 5).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Different content, same firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3, 4, 6 }, new byte[5]).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 5); b.writeBytes( wrappedBuffer(new byte[] { 7, 8, 5, 9, 10 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Different content, different firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 1, 10).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 5); b.writeBytes( wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 6, 5).order(order)); assertFalse(ByteBufUtil.equals(a, b)); } }
buffer/src/test/java/io/netty/buffer/AbstractCompositeChannelBufferTest.java
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.buffer; import static io.netty.buffer.Unpooled.*; import static org.junit.Assert.*; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.List; import org.junit.Test; /** * An abstract test class for composite channel buffers */ public abstract class AbstractCompositeChannelBufferTest extends AbstractChannelBufferTest { private final ByteOrder order; protected AbstractCompositeChannelBufferTest(ByteOrder order) { if (order == null) { throw new NullPointerException("order"); } this.order = order; } private List<ByteBuf> buffers; private ByteBuf buffer; @Override protected ByteBuf newBuffer(int length) { buffers = new ArrayList<ByteBuf>(); for (int i = 0; i < length; i += 10) { buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[1]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[2]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[3]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[4]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[5]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[6]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); 
buffers.add(Unpooled.wrappedBuffer(new byte[7]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[8]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); buffers.add(Unpooled.wrappedBuffer(new byte[9]).order(order)); buffers.add(Unpooled.EMPTY_BUFFER); } buffer = Unpooled.wrappedBuffer(buffers.toArray(new ByteBuf[buffers.size()])); buffer.writerIndex(length); buffer = Unpooled.wrappedBuffer(buffer); assertEquals(length, buffer.capacity()); assertEquals(length, buffer.readableBytes()); assertFalse(buffer.writable()); buffer.writerIndex(0); return buffer; } @Override protected ByteBuf[] components() { return buffers.toArray(new ByteBuf[buffers.size()]); } // Composite buffer does not waste bandwidth on discardReadBytes, but // the test will fail in strict mode. @Override protected boolean discardReadBytesDoesNotMoveWritableBytes() { return false; } @Test public void testDiscardReadBytes3() { ByteBuf a, b; a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer( wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, 0, 5).order(order), wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, 5, 5).order(order)); a.skipBytes(6); a.markReaderIndex(); b.skipBytes(6); b.markReaderIndex(); assertEquals(a.readerIndex(), b.readerIndex()); a.readerIndex(a.readerIndex() - 1); b.readerIndex(b.readerIndex() - 1); assertEquals(a.readerIndex(), b.readerIndex()); a.writerIndex(a.writerIndex() - 1); a.markWriterIndex(); b.writerIndex(b.writerIndex() - 1); b.markWriterIndex(); assertEquals(a.writerIndex(), b.writerIndex()); a.writerIndex(a.writerIndex() + 1); b.writerIndex(b.writerIndex() + 1); assertEquals(a.writerIndex(), b.writerIndex()); assertTrue(ByteBufUtil.equals(a, b)); // now discard a.discardReadBytes(); b.discardReadBytes(); assertEquals(a.readerIndex(), b.readerIndex()); assertEquals(a.writerIndex(), b.writerIndex()); assertTrue(ByteBufUtil.equals(a, b)); a.resetReaderIndex(); 
b.resetReaderIndex(); assertEquals(a.readerIndex(), b.readerIndex()); a.resetWriterIndex(); b.resetWriterIndex(); assertEquals(a.writerIndex(), b.writerIndex()); assertTrue(ByteBufUtil.equals(a, b)); } @Test public void testCompositeWrappedBuffer() { ByteBuf header = dynamicBuffer(12).order(order); ByteBuf payload = dynamicBuffer(512).order(order); header.writeBytes(new byte[12]); payload.writeBytes(new byte[512]); ByteBuf buffer = wrappedBuffer(header, payload); assertTrue(header.readableBytes() == 12); assertTrue(payload.readableBytes() == 512); assertEquals(12 + 512, buffer.readableBytes()); assertFalse(buffer.hasNioBuffer()); } @Test public void testSeveralBuffersEquals() { ByteBuf a, b; //XXX Same tests with several buffers in wrappedCheckedBuffer // Different length. a = wrappedBuffer(new byte[] { 1 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1 }).order(order), wrappedBuffer(new byte[] { 2 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Same content, same firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1 }).order(order), wrappedBuffer(new byte[] { 2 }).order(order), wrappedBuffer(new byte[] { 3 }).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Same content, different firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 1, 2).order(order), wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 3, 1).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Different content, same firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2 }).order(order), wrappedBuffer(new byte[] { 4 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Different content, different firstIndex, short length. 
a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 1, 2).order(order), wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 3, 1).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Same content, same firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order), wrappedBuffer(new byte[] { 4, 5, 6 }).order(order), wrappedBuffer(new byte[] { 7, 8, 9, 10 }).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Same content, different firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 1, 5).order(order), wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 6, 5).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Different content, same firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3, 4, 6 }).order(order), wrappedBuffer(new byte[] { 7, 8, 5, 9, 10 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Different content, different firstIndex, long length. 
a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 1, 5).order(order), wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 6, 5).order(order)); assertFalse(ByteBufUtil.equals(a, b)); } @Test public void testWrappedBuffer() { assertEquals(16, wrappedBuffer(wrappedBuffer(ByteBuffer.allocateDirect(16))).capacity()); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer(wrappedBuffer(new byte[][] { new byte[] { 1, 2, 3 } }).order(order))); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer(wrappedBuffer( new byte[] { 1 }, new byte[] { 2 }, new byte[] { 3 }).order(order))); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer(new ByteBuf[] { wrappedBuffer(new byte[] { 1, 2, 3 }).order(order) })); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer( wrappedBuffer(new byte[] { 1 }).order(order), wrappedBuffer(new byte[] { 2 }).order(order), wrappedBuffer(new byte[] { 3 }).order(order))); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer(wrappedBuffer(new ByteBuffer[] { ByteBuffer.wrap(new byte[] { 1, 2, 3 }) }))); assertEquals( wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)), wrappedBuffer(wrappedBuffer( ByteBuffer.wrap(new byte[] { 1 }), ByteBuffer.wrap(new byte[] { 2 }), ByteBuffer.wrap(new byte[] { 3 })))); } @Test public void testWrittenBuffersEquals() { //XXX Same tests than testEquals with written AggregateChannelBuffers ByteBuf a, b; // Different length. 
a = wrappedBuffer(new byte[] { 1 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1 }, new byte[1]).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 1); b.writeBytes( wrappedBuffer(new byte[] { 2 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Same content, same firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1 }, new byte[2]).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 2); b.writeBytes( wrappedBuffer(new byte[] { 2 }).order(order)); b.writeBytes(wrappedBuffer(new byte[] { 3 }).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Same content, different firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 1, 3).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 1); b.writeBytes( wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 3, 1).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Different content, same firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2 }, new byte[1]).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 1); b.writeBytes( wrappedBuffer(new byte[] { 4 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Different content, different firstIndex, short length. a = wrappedBuffer(new byte[] { 1, 2, 3 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 1, 3).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 1); b.writeBytes( wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 3, 1).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Same content, same firstIndex, long length. 
a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }, new byte[7]).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 7); b.writeBytes( wrappedBuffer(new byte[] { 4, 5, 6 }).order(order)); b.writeBytes( wrappedBuffer(new byte[] { 7, 8, 9, 10 }).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Same content, different firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 1, 10).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 5); b.writeBytes( wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 6, 5).order(order)); assertTrue(ByteBufUtil.equals(a, b)); // Different content, same firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3, 4, 6 }, new byte[5]).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 5); b.writeBytes( wrappedBuffer(new byte[] { 7, 8, 5, 9, 10 }).order(order)); assertFalse(ByteBufUtil.equals(a, b)); // Different content, different firstIndex, long length. a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order); b = wrappedBuffer(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 1, 10).order(order)); // to enable writeBytes b.writerIndex(b.writerIndex() - 5); b.writeBytes( wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 6, 5).order(order)); assertFalse(ByteBufUtil.equals(a, b)); } }
Provide a basic test for getBufferFor()
buffer/src/test/java/io/netty/buffer/AbstractCompositeChannelBufferTest.java
Provide a basic test for getBufferFor()
<ide><path>uffer/src/test/java/io/netty/buffer/AbstractCompositeChannelBufferTest.java <ide> package io.netty.buffer; <ide> <ide> import static io.netty.buffer.Unpooled.*; <add>import java.io.IOException; <ide> import static org.junit.Assert.*; <ide> <ide> import java.nio.ByteBuffer; <ide> @Override <ide> protected boolean discardReadBytesDoesNotMoveWritableBytes() { <ide> return false; <add> } <add> <add> /** <add> * Tests the "getBufferFor" method <add> */ <add> @Test <add> public void testGetBufferFor() throws IOException { <add> CompositeByteBuf buf = (CompositeByteBuf) Unpooled.wrappedBuffer(new byte[] { 1, 2, 3, 4, 5 }, new byte[] {4, 5, 6, 7, 8, 9, 26}); <add> <add> //Ensure that a random place will be fine <add> assertEquals(buf.getBufferFor(2).capacity(), 5); <add> <add> //Loop through each byte <add> <add> byte index = 0; <add> <add> while (index < buf.capacity()) { <add> assertNotNull(buf.getBufferFor(index++)); <add> } <ide> } <ide> <ide> @Test
Java
apache-2.0
8a26e253f9b45d4dc757d12f5f5653874547aa23
0
gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa
/* * Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * For further details of the Gene Expression Atlas project, including source code, * downloads and documentation, please see: * * http://gxa.github.com/gxa */ package ae3.model; import uk.ac.ebi.gxa.exceptions.LogUtil; import uk.ac.ebi.gxa.netcdf.reader.NetCDFProxy; import uk.ac.ebi.gxa.utils.EfvTree; import uk.ac.ebi.gxa.utils.EscapeUtil; import uk.ac.ebi.microarray.atlas.model.UpDownExpression; import java.io.IOException; import java.util.List; /** * Lazy expression statistics class * * @author pashky */ public class ExpressionStats { private final NetCDFProxy proxy; private final EfvTree<Integer> efvTree = new EfvTree<Integer>(); private EfvTree<Stat> lastData; private long lastDesignElement = -1; ExpressionStats(NetCDFProxy proxy) throws IOException { this.proxy = proxy; final List<String> uvals = proxy.getUniqueValues(); int valueIndex = 0; for (String uval : proxy.getUniqueValues()) { final String[] pair = uval.split(NetCDFProxy.NCDF_PROP_VAL_SEP_REGEX); if (pair.length != 2) { throw LogUtil.createUnexpected("uVAL '" + uval + "'" + " does not match '.*||.*'"); } efvTree.put(normalized(pair[0], "ba_"), pair[1], valueIndex); ++valueIndex; } } private static String normalized(String name, String prefix) { if (name.startsWith(prefix)) { name = name.substring(prefix.length()); } return EscapeUtil.encode(name); } /** * Gets {@link 
uk.ac.ebi.gxa.utils.EfvTree} of expression statistics structures * * @param designElementId design element id * @return efv tree of stats */ EfvTree<Stat> getExpressionStats(int designElementId) { if (lastData != null && designElementId == lastDesignElement) return lastData; try { final float[] pvals = proxy.getPValuesForDesignElement(designElementId); final float[] tstats = proxy.getTStatisticsForDesignElement(designElementId); final EfvTree<Stat> result = new EfvTree<Stat>(); for (EfvTree.EfEfv<Integer> efefv : efvTree.getNameSortedList()) { float pvalue = pvals[efefv.getPayload()]; float tstat = tstats[efefv.getPayload()]; if (tstat > 1e-8 || tstat < -1e-8) { result.put(efefv.getEf(), efefv.getEfv(), new Stat(tstat, pvalue)); } } lastDesignElement = designElementId; lastData = result; return result; } catch (IOException e) { throw LogUtil.createUnexpected("Exception during pvalue/tstat load", e); } catch (ArrayIndexOutOfBoundsException e) { throw LogUtil.createUnexpected("Exception during pvalue/tstat load", e); } } /** * Expression statistics for ef/efv pair for one design element */ public static class Stat implements Comparable<Stat> { private final float pvalue; private final float tstat; /** * Constructor * * @param tstat t-statistics * @param pvalue p-value */ public Stat(float tstat, float pvalue) { this.pvalue = pvalue; this.tstat = tstat; } /** * Gets p-value * * @return p-value */ public float getPvalue() { return pvalue; } /** * Gets t-statistics * * @return t-statistics value */ public float getTstat() { return tstat; } /** * Returns whether gene is over-expressed or under-expressed * * @return gene expression */ public UpDownExpression getExpression() { return UpDownExpression.valueOf(pvalue, tstat); } /** * Useful, as {@link uk.ac.ebi.gxa.utils.EfvTree} can return elements sorted by value. * P-value of statistics, in this case. 
* * @param o other object * @return 1, 0 or -1 */ public int compareTo(Stat o) { return Float.valueOf(getPvalue()).compareTo(o.getPvalue()); } @Override public boolean equals(Object obj) { return obj instanceof Stat && compareTo((Stat) obj) == 0; } @Override public int hashCode() { return pvalue != +0.0f ? Float.floatToIntBits(pvalue) : 0; } } }
atlas-web/src/main/java/ae3/model/ExpressionStats.java
/* * Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * For further details of the Gene Expression Atlas project, including source code, * downloads and documentation, please see: * * http://gxa.github.com/gxa */ package ae3.model; import uk.ac.ebi.gxa.exceptions.LogUtil; import uk.ac.ebi.gxa.netcdf.reader.NetCDFProxy; import uk.ac.ebi.gxa.utils.EfvTree; import uk.ac.ebi.gxa.utils.EscapeUtil; import uk.ac.ebi.microarray.atlas.model.UpDownExpression; import java.io.IOException; import java.util.List; /** * Lazy expression statistics class * * @author pashky */ public class ExpressionStats { private final NetCDFProxy proxy; private final EfvTree<Integer> efvTree = new EfvTree<Integer>(); private EfvTree<Stat> lastData; private long lastDesignElement = -1; ExpressionStats(NetCDFProxy proxy) throws IOException { this.proxy = proxy; final List<String> uvals = proxy.getUniqueValues(); int valueIndex = 0; for (String uval : proxy.getUniqueValues()) { final String[] pair = uval.split(NetCDFProxy.NCDF_PROP_VAL_SEP_REGEX); if (pair.length != 2) { throw LogUtil.createUnexpected("uVAL '" + uval + "'" + " does not match '.*||.*'"); } efvTree.put(normalized(pair[0], "ba_"), pair[1], valueIndex); ++valueIndex; } } private String[] getFactorsAndCharacteristics(NetCDFProxy proxy) throws IOException { final String[] result = proxy.getFactorsAndCharacteristics(); // Ensure backwards compatibility return 
result.length != 0 ? result : proxy.getFactors(); } private static String normalized(String name, String prefix) { if (name.startsWith(prefix)) { name = name.substring(prefix.length()); } return EscapeUtil.encode(name); } /** * Gets {@link uk.ac.ebi.gxa.utils.EfvTree} of expression statistics structures * * @param designElementId design element id * @return efv tree of stats */ EfvTree<Stat> getExpressionStats(int designElementId) { if (lastData != null && designElementId == lastDesignElement) return lastData; try { final float[] pvals = proxy.getPValuesForDesignElement(designElementId); final float[] tstats = proxy.getTStatisticsForDesignElement(designElementId); final EfvTree<Stat> result = new EfvTree<Stat>(); for (EfvTree.EfEfv<Integer> efefv : efvTree.getNameSortedList()) { float pvalue = pvals[efefv.getPayload()]; float tstat = tstats[efefv.getPayload()]; if (tstat > 1e-8 || tstat < -1e-8) { result.put(efefv.getEf(), efefv.getEfv(), new Stat(tstat, pvalue)); } } lastDesignElement = designElementId; lastData = result; return result; } catch (IOException e) { throw LogUtil.createUnexpected("Exception during pvalue/tstat load", e); } catch (ArrayIndexOutOfBoundsException e) { throw LogUtil.createUnexpected("Exception during pvalue/tstat load", e); } } /** * Expression statistics for ef/efv pair for one design element */ public static class Stat implements Comparable<Stat> { private final float pvalue; private final float tstat; /** * Constructor * * @param tstat t-statistics * @param pvalue p-value */ public Stat(float tstat, float pvalue) { this.pvalue = pvalue; this.tstat = tstat; } /** * Gets p-value * * @return p-value */ public float getPvalue() { return pvalue; } /** * Gets t-statistics * * @return t-statistics value */ public float getTstat() { return tstat; } /** * Returns whether gene is over-expressed or under-expressed * * @return gene expression */ public UpDownExpression getExpression() { return UpDownExpression.valueOf(pvalue, tstat); } /** * 
Useful, as {@link uk.ac.ebi.gxa.utils.EfvTree} can return elements sorted by value. * P-value of statistics, in this case. * * @param o other object * @return 1, 0 or -1 */ public int compareTo(Stat o) { return Float.valueOf(getPvalue()).compareTo(o.getPvalue()); } @Override public boolean equals(Object obj) { return obj instanceof Stat && compareTo((Stat) obj) == 0; } @Override public int hashCode() { return pvalue != +0.0f ? Float.floatToIntBits(pvalue) : 0; } } }
cleanup: unused method has been removed
atlas-web/src/main/java/ae3/model/ExpressionStats.java
cleanup: unused method has been removed
<ide><path>tlas-web/src/main/java/ae3/model/ExpressionStats.java <ide> efvTree.put(normalized(pair[0], "ba_"), pair[1], valueIndex); <ide> ++valueIndex; <ide> } <del> } <del> <del> private String[] getFactorsAndCharacteristics(NetCDFProxy proxy) throws IOException { <del> final String[] result = proxy.getFactorsAndCharacteristics(); <del> // Ensure backwards compatibility <del> return result.length != 0 ? result : proxy.getFactors(); <ide> } <ide> <ide> private static String normalized(String name, String prefix) {
Java
apache-2.0
bd712f799e2187f85ef2eae37d0ad6f8955344c3
0
dhirajsb/fabric8,sobkowiak/fabric8,sobkowiak/fabric8,rhuss/fabric8,dhirajsb/fabric8,dhirajsb/fabric8,rhuss/fabric8,christian-posta/fabric8,chirino/fabric8v2,zmhassan/fabric8,rhuss/fabric8,KurtStam/fabric8,christian-posta/fabric8,rhuss/fabric8,KurtStam/fabric8,chirino/fabric8v2,rajdavies/fabric8,KurtStam/fabric8,rajdavies/fabric8,zmhassan/fabric8,christian-posta/fabric8,sobkowiak/fabric8,christian-posta/fabric8,rajdavies/fabric8,zmhassan/fabric8,chirino/fabric8v2,zmhassan/fabric8,sobkowiak/fabric8,chirino/fabric8v2,KurtStam/fabric8,dhirajsb/fabric8,rajdavies/fabric8
/** * Copyright 2005-2015 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.kubernetes.api; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import io.fabric8.kubernetes.api.extensions.Templates; import io.fabric8.kubernetes.api.model.Container; import io.fabric8.kubernetes.api.model.ContainerPort; import io.fabric8.kubernetes.api.model.ContainerState; import io.fabric8.kubernetes.api.model.ContainerStateRunning; import io.fabric8.kubernetes.api.model.ContainerStateTerminated; import io.fabric8.kubernetes.api.model.ContainerStateWaiting; import io.fabric8.kubernetes.api.model.ContainerStatus; import io.fabric8.kubernetes.api.model.EnvVar; import io.fabric8.kubernetes.api.model.HasMetadata; import io.fabric8.kubernetes.api.model.IntOrString; import io.fabric8.kubernetes.api.model.KubernetesList; import io.fabric8.kubernetes.api.model.KubernetesResource; import io.fabric8.kubernetes.api.model.ObjectMeta; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.PodList; import io.fabric8.kubernetes.api.model.PodSpec; import io.fabric8.kubernetes.api.model.PodStatus; import io.fabric8.kubernetes.api.model.PodTemplateSpec; import io.fabric8.kubernetes.api.model.ReplicationController; import io.fabric8.kubernetes.api.model.ReplicationControllerList; import 
io.fabric8.kubernetes.api.model.ReplicationControllerSpec; import io.fabric8.kubernetes.api.model.RootPaths; import io.fabric8.kubernetes.api.model.Secret; import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.ServiceList; import io.fabric8.kubernetes.api.model.ServicePort; import io.fabric8.kubernetes.api.model.ServiceSpec; import io.fabric8.kubernetes.client.Config; import io.fabric8.kubernetes.client.ConfigBuilder; import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.kubernetes.client.KubernetesClientException; import io.fabric8.kubernetes.client.internal.Utils; import io.fabric8.openshift.api.model.DeploymentConfig; import io.fabric8.openshift.api.model.DeploymentConfigSpec; import io.fabric8.openshift.api.model.OAuthClient; import io.fabric8.openshift.api.model.Parameter; import io.fabric8.openshift.api.model.Route; import io.fabric8.openshift.api.model.RouteList; import io.fabric8.openshift.api.model.RouteSpec; import io.fabric8.openshift.api.model.Template; import io.fabric8.openshift.client.OpenShiftClient; import io.fabric8.utils.Files; import io.fabric8.utils.Filter; import io.fabric8.utils.Filters; import io.fabric8.utils.Objects; import io.fabric8.utils.Strings; import io.fabric8.utils.Systems; import io.fabric8.utils.ssl.TrustEverythingSSLTrustManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xbill.DNS.ARecord; import org.xbill.DNS.Lookup; import org.xbill.DNS.Record; import org.xbill.DNS.SRVRecord; import org.xbill.DNS.TextParseException; import org.xbill.DNS.Type; import javax.net.ssl.SSLException; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLKeyException; import javax.net.ssl.SSLPeerUnverifiedException; import javax.net.ssl.SSLProtocolException; import javax.net.ssl.SSLSocketFactory; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.Socket; 
import java.net.URL; import java.net.UnknownHostException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import static io.fabric8.utils.Lists.notNullList; import static io.fabric8.utils.Strings.isNullOrBlank; /** * Kubernetes utility methods. */ public final class KubernetesHelper { public static final String KUBERNETES_NAMESPACE_SYSTEM_PROPERTY = "kubernetes.namespace"; public static final String KUBERNETES_NAMESPACE_ENV = "KUBERNETES_NAMESPACE"; public static final String DEFAULT_NAMESPACE = "default"; private static final transient Logger LOG = LoggerFactory.getLogger(KubernetesHelper.class); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); public static final int INTORSTRING_KIND_INT = 0; public static final int INTORSTRING_KIND_STRING = 1; public static final String DEFAULT_DOCKER_HOST = "tcp://localhost:2375"; protected static SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssX"); public static final String defaultApiVersion = "v1"; public static final String defaultOsApiVersion = "v1"; private static final String HOST_SUFFIX = "_SERVICE_HOST"; private static final String PORT_SUFFIX = "_SERVICE_PORT"; private static final String PROTO_SUFFIX = "_TCP_PROTO"; public static final String DEFAULT_PROTO = "tcp"; private static final ConcurrentMap<URL, Boolean> IS_OPENSHIFT = new ConcurrentHashMap<>(); private static final Config CONFIG = new ConfigBuilder().build(); public static String defaultNamespace() { return CONFIG.getNamespace(); } /** * Returns the ID of the 
given object */ public static String getObjectId(Object object) { if (object instanceof HasMetadata) { return getName((HasMetadata) object); } else { return object != null ? object.toString() : null; } } public static ObjectMeta getOrCreateMetadata(HasMetadata entity) { ObjectMeta metadata = entity.getMetadata(); if (metadata == null) { metadata = new ObjectMeta(); entity.setMetadata(metadata); } return metadata; } /** * Returns the resource version for the entity or null if it does not have one */ public static String getResourceVersion(HasMetadata entity) { if (entity != null) { ObjectMeta metadata = entity.getMetadata(); if (metadata != null) { String resourceVersion = metadata.getResourceVersion(); if (Strings.isNotBlank(resourceVersion)) { return resourceVersion; } } } return null; } /** * Returns true if this entity has a valid non blank resourceVersion in its metadata */ public static boolean hasResourceVersion(HasMetadata entity) { return getResourceVersion(entity) != null; } public static String getName(ObjectMeta entity) { if (entity != null) { return Strings.firstNonBlank(entity.getName(), getAdditionalPropertyText(entity.getAdditionalProperties(), "id"), entity.getUid()); } else { return null; } } /** * Returns the kind of the entity */ public static String getKind(HasMetadata entity) { if (entity != null) { // TODO use reflection to find the kind? 
if (entity instanceof KubernetesList) { return "List"; } else { return entity.getClass().getSimpleName(); } } else { return null; } } public static String getName(HasMetadata entity) { if (entity != null) { return getName(entity.getMetadata()); } else { return null; } } public static void setName(HasMetadata entity, String name) { getOrCreateMetadata(entity).setName(name); } public static void setName(HasMetadata entity, String namespace, String name) { ObjectMeta metadata = getOrCreateMetadata(entity); metadata.setNamespace(namespace); metadata.setName(name); } public static void setNamespace(HasMetadata entity, String namespace) { getOrCreateMetadata(entity).setNamespace(namespace); } public static String getNamespace(ObjectMeta entity) { if (entity != null) { return entity.getNamespace(); } else { return null; } } public static String getNamespace(HasMetadata entity) { if (entity != null) { return getNamespace(entity.getMetadata()); } else { return null; } } public static Map<String, String> getOrCreateAnnotations(HasMetadata entity) { ObjectMeta metadata = getOrCreateMetadata(entity); Map<String, String> answer = metadata.getAnnotations(); if (answer == null) { // use linked so the annotations can be in the FIFO order answer = new LinkedHashMap<>(); metadata.setAnnotations(answer); } return answer; } public static Map<String, String> getOrCreateLabels(HasMetadata entity) { ObjectMeta metadata = getOrCreateMetadata(entity); Map<String, String> answer = metadata.getLabels(); if (answer == null) { // use linked so the annotations can be in the FIFO order answer = new LinkedHashMap<>(); metadata.setLabels(answer); } return answer; } /** * Returns the labels of the given metadata object or an empty map if the metadata or labels are null */ @SuppressWarnings("unchecked") public static Map<String, String> getLabels(ObjectMeta metadata) { if (metadata != null) { Map<String, String> labels = metadata.getLabels(); if (labels != null) { return labels; } } return 
Collections.EMPTY_MAP; } @SuppressWarnings("unchecked") public static Map<String, String> getLabels(HasMetadata entity) { if (entity != null) { return getLabels(entity.getMetadata()); } return Collections.EMPTY_MAP; } public static ServiceSpec getOrCreateSpec(Service entity) { ServiceSpec spec = entity.getSpec(); if (spec == null) { spec = new ServiceSpec(); entity.setSpec(spec); } return spec; } public static String getPortalIP(Service entity) { String answer = null; if (entity != null) { ServiceSpec spec = getOrCreateSpec(entity); return spec.getClusterIP(); } return answer; } @SuppressWarnings("unchecked") public static Map<String, String> getSelector(Service entity) { Map<String, String> answer = null; if (entity != null) { ServiceSpec spec = getOrCreateSpec(entity); answer = spec.getSelector(); } return answer != null ? answer : Collections.EMPTY_MAP; } public static void setSelector(Service entity, Map<String, String> labels) { ServiceSpec spec = getOrCreateSpec(entity); spec.setSelector(labels); } public static Set<Integer> getPorts(Service entity) { Set<Integer> answer = new HashSet<>(); if (entity != null) { ServiceSpec spec = getOrCreateSpec(entity); for (ServicePort port : spec.getPorts()) { answer.add(port.getPort()); } } return answer; } protected static String getAdditionalPropertyText(Map<String, Object> additionalProperties, String name) { if (additionalProperties != null) { Object value = additionalProperties.get(name); if (value != null) { return value.toString(); } } return null; } protected static Map<String, Object> getMetadata(Map<String, Object> additionalProperties, boolean create) { Map<String, Object> answer = getAdditionalPropertyMap(additionalProperties, "metadata"); if (answer == null) { answer = new LinkedHashMap<>(); if (create) { additionalProperties.put("metadata", answer); } } return answer; } @SuppressWarnings("unchecked") protected static Map<String, Object> getAdditionalPropertyMap(Map<String, Object> additionalProperties, 
String name) { if (additionalProperties != null) { Object value = additionalProperties.get(name); if (value instanceof Map) { return (Map<String, Object>) value; } } return null; } public static String getDockerIp() { String url = resolveDockerHost(); int idx = url.indexOf("://"); if (idx > 0) { url = url.substring(idx + 3); } idx = url.indexOf(":"); if (idx > 0) { url = url.substring(0, idx); } return url; } public static String resolveDockerHost() { String dockerHost = System.getenv("DOCKER_HOST"); if (isNullOrBlank(dockerHost)) { dockerHost = System.getProperty("docker.host"); } if (isNullOrBlank(dockerHost)) { return DEFAULT_DOCKER_HOST; } else { return dockerHost; } } public static String toJson(Object dto) throws JsonProcessingException { Class<?> clazz = dto.getClass(); return OBJECT_MAPPER.writerFor(clazz).writeValueAsString(dto); } /** * Returns the given json data as a DTO such as * {@link Pod}, {@link ReplicationController} or * {@link io.fabric8.kubernetes.api.model.Service} * from the Kubernetes REST API */ public static Object loadJson(File file) throws IOException { byte[] data = Files.readBytes(file); return loadJson(data); } /** * Returns the given json data as a DTO such as * {@link Pod}, {@link ReplicationController} or * {@link io.fabric8.kubernetes.api.model.Service} * from the Kubernetes REST API */ public static Object loadJson(InputStream in) throws IOException { byte[] data = Files.readBytes(in); return loadJson(data); } public static Object loadJson(String json) throws IOException { byte[] data = json.getBytes(); return loadJson(data); } /** * Returns the given json data as a DTO such as * {@link Pod}, {@link ReplicationController} or * {@link io.fabric8.kubernetes.api.model.Service} * from the Kubernetes REST API */ public static Object loadJson(byte[] json) throws IOException { if (json != null && json.length > 0) { return OBJECT_MAPPER.readerFor(KubernetesResource.class).readValue(json); } return null; } /** * Loads the YAML file for 
the given DTO class */ public static <T> T loadYaml(InputStream in, Class<T> clazz) throws IOException { byte[] data = Files.readBytes(in); return loadYaml(data, clazz); } /** * Loads the YAML file for the given DTO class */ public static <T> T loadYaml(File file, Class<T> clazz) throws IOException { byte[] data = Files.readBytes(file); return loadYaml(data, clazz); } /** * Loads the YAML file for the given DTO class */ public static <T> T loadYaml(byte[] data, Class<T> clazz) throws IOException { ObjectMapper mapper = new ObjectMapper(new YAMLFactory()); return mapper.readValue(data, clazz); } /** * Loads the Kubernetes JSON and converts it to a list of entities */ @SuppressWarnings("unchecked") public static List<HasMetadata> toItemList(Object entity) throws IOException { if (entity instanceof List) { return (List<HasMetadata>) entity; } else if (entity instanceof HasMetadata[]) { HasMetadata[] array = (HasMetadata[]) entity; return Arrays.asList(array); } else if (entity instanceof KubernetesList) { KubernetesList config = (KubernetesList) entity; return config.getItems(); } else if (entity instanceof Template) { Template objects = (Template) entity; return objects.getObjects(); } else { List<HasMetadata> answer = new ArrayList<>(); if (entity instanceof HasMetadata) { answer.add((HasMetadata) entity); } return answer; } } /** * Saves the json object to the given file */ public static void saveJson(File json, Object object) throws IOException { OBJECT_MAPPER.writer().withDefaultPrettyPrinter().writeValue(json, object); } /** * Returns a map indexed by pod id of the pods */ public static Map<String, Pod> toPodMap(PodList podSchema) { return toFilteredPodMap(podSchema, Filters.<Pod>trueFilter()); } protected static Map<String, Pod> toFilteredPodMap(PodList podSchema, Filter<Pod> filter) { List<Pod> list = podSchema != null ? 
podSchema.getItems() : null; List<Pod> filteredList = Filters.filter(list, filter); return toPodMap(filteredList); } /** * Returns a map indexed by pod id of the pods */ public static Map<String, Pod> toPodMap(List<Pod> pods) { List<Pod> list = notNullList(pods); Map<String, Pod> answer = new HashMap<>(); for (Pod pod : list) { String id = getName(pod); if (Strings.isNotBlank(id)) { answer.put(id, pod); } } return answer; } /** * Returns a map indexed by service id of the services */ public static Map<String, Service> toServiceMap(ServiceList serviceSchema) { return toServiceMap(serviceSchema != null ? serviceSchema.getItems() : null); } /** * Returns a map indexed by service id of the services */ public static Map<String, Service> toServiceMap(List<Service> services) { List<Service> list = notNullList(services); Map<String, Service> answer = new HashMap<>(); for (Service service : list) { String id = getName(service); if (Strings.isNotBlank(id)) { answer.put(id, service); } } return answer; } public static Map<String, Service> toFilteredServiceMap(ServiceList serviceList, Filter<Service> filter) { List<Service> list = serviceList != null ? serviceList.getItems() : null; List<Service> filteredList = Filters.filter(list, filter); return toServiceMap(filteredList); } /** * Returns a map indexed by replicationController id of the replicationControllers */ public static Map<String, ReplicationController> toReplicationControllerMap(ReplicationControllerList replicationControllerSchema) { Filter<ReplicationController> filter = createReplicationControllerFilter((String) null); return toFilteredReplicationControllerMap(replicationControllerSchema, filter); } protected static Map<String, ReplicationController> toFilteredReplicationControllerMap(ReplicationControllerList replicationControllerSchema, Filter<ReplicationController> filter) { List<ReplicationController> list = replicationControllerSchema != null ? 
replicationControllerSchema.getItems() : null; List<ReplicationController> filteredList = Filters.filter(list, filter); return toReplicationControllerMap(filteredList); } /** * Returns a map indexed by replicationController id of the replicationControllers */ public static Map<String, ReplicationController> toReplicationControllerMap(List<ReplicationController> replicationControllers) { List<ReplicationController> list = notNullList(replicationControllers); Map<String, ReplicationController> answer = new HashMap<>(); for (ReplicationController replicationControllerSchema : list) { String id = getName(replicationControllerSchema); if (Strings.isNotBlank(id)) { answer.put(id, replicationControllerSchema); } } return answer; } public static Map<String, Pod> getPodMap(KubernetesClient kubernetes) { return getPodMap(kubernetes, null); } public static Map<String, Pod> getPodMap(KubernetesClient kubernetes, String namespace) { PodList pods = null; try { pods = kubernetes.pods().inNamespace(namespace).list(); } catch (KubernetesClientException e) { if (e.getCode() == 404) { // ignore not found } else { throw e; } } return toPodMap(pods); } public static Map<String, Pod> getSelectedPodMap(KubernetesClient kubernetes, String selector) { return getSelectedPodMap(kubernetes, null, selector); } public static Map<String, Pod> getSelectedPodMap(KubernetesClient kubernetes, String namespace, String selector) { return getFilteredPodMap(kubernetes, namespace, createPodFilter(selector)); } public static Map<String, Pod> getFilteredPodMap(KubernetesClient kubernetes, Filter<Pod> filter) { return getFilteredPodMap(kubernetes, null, filter); } public static Map<String, Pod> getFilteredPodMap(KubernetesClient kubernetes, String namespace, Filter<Pod> filter) { return toFilteredPodMap(kubernetes.pods().inNamespace(namespace).list(), filter); } public static Map<String, Service> getServiceMap(KubernetesClient kubernetes) { return getServiceMap(kubernetes, null); } public static Map<String, 
Service> getServiceMap(KubernetesClient kubernetes, String namespace) { return toServiceMap(kubernetes.services().inNamespace(namespace).list()); } public static Map<String, ReplicationController> getReplicationControllerMap(KubernetesClient kubernetes) { return getReplicationControllerMap(kubernetes, null); } public static Map<String, ReplicationController> getReplicationControllerMap(KubernetesClient kubernetes, String namespace) { return toReplicationControllerMap(kubernetes.replicationControllers().inNamespace(namespace).list()); } public static Map<String, ReplicationController> getSelectedReplicationControllerMap(KubernetesClient kubernetes, String selector) { return getSelectedReplicationControllerMap(kubernetes, null, selector); } public static Map<String, ReplicationController> getSelectedReplicationControllerMap(KubernetesClient kubernetes, String namespace, String selector) { return toReplicationControllerMap(kubernetes.replicationControllers().inNamespace(namespace).withLabels(toLabelsMap(selector)).list()); } /** * Removes empty pods returned by Kubernetes */ public static void removeEmptyPods(PodList podSchema) { List<Pod> list = notNullList(podSchema.getItems()); List<Pod> removeItems = new ArrayList<Pod>(); for (Pod pod : list) { if (Strings.isNullOrBlank(getName(pod))) { removeItems.add(pod); } } list.removeAll(removeItems); } /** * Returns the pod id for the given container id */ public static String containerNameToPodId(String containerName) { // TODO use prefix? 
return containerName;
    }

    /**
     * Returns a string form of the given labels map as comma-separated
     * {@code key=value} pairs (e.g. {@code "app=foo,tier=backend"}).
     * Returns an empty string for a null or empty map.
     */
    public static String toLabelsString(Map<String, String> labelMap) {
        StringBuilder buffer = new StringBuilder();
        if (labelMap != null) {
            for (Map.Entry<String, String> entry : labelMap.entrySet()) {
                if (buffer.length() > 0) {
                    buffer.append(",");
                }
                buffer.append(entry.getKey()).append("=").append(entry.getValue());
            }
        }
        return buffer.toString();
    }

    /**
     * Parses a comma-separated list of {@code key=value} pairs into a map;
     * the inverse of {@link #toLabelsString(Map)}.
     * Keys and values are trimmed; malformed entries (no '=' or more than one '=')
     * are silently skipped, as in the original behaviour.
     */
    public static Map<String, String> toLabelsMap(String labels) {
        Map<String, String> map = new HashMap<>();
        if (labels != null && !labels.isEmpty()) {
            for (String str : labels.split(",")) {
                String[] keyValue = str.split("=");
                // keep only well-formed "key=value" entries; anything else is ignored
                if (keyValue.length == 2) {
                    map.put(keyValue[0].trim(), keyValue[1].trim());
                }
            }
        }
        return map;
    }

    /**
     * Creates a filter on a pod using the given text string;
     * a null or blank filter matches every pod.
     */
    public static Filter<Pod> createPodFilter(final String textFilter) {
        if (isNullOrBlank(textFilter)) {
            return Filters.<Pod>trueFilter();
        } else {
            return new Filter<Pod>() {
                public String toString() {
                    return "PodFilter(" + textFilter + ")";
                }

                public boolean matches(Pod entity) {
                    return filterMatchesIdOrLabels(textFilter, getName(entity), entity.getMetadata().getLabels());
                }
            };
        }
    }

    /**
     * Creates a filter on a pod using the given set of labels;
     * a null or empty selector matches every pod.
     */
    public static Filter<Pod> createPodFilter(final Map<String, String> labelSelector) {
        if (labelSelector == null || labelSelector.isEmpty()) {
            return Filters.<Pod>trueFilter();
        } else {
            return new Filter<Pod>() {
                public String toString() {
                    return "PodFilter(" + labelSelector + ")";
                }

                public boolean matches(Pod entity) {
                    return filterLabels(labelSelector, entity.getMetadata().getLabels());
                }
            };
        }
    }

    /**
     * Creates a filter on a pod's annotations using the given set of attribute values
     */
    public static Filter<Pod>
createPodAnnotationFilter(final Map<String, String> annotationSelector) { if (annotationSelector == null || annotationSelector.isEmpty()) { return Filters.<Pod>trueFilter(); } else { return new Filter<Pod>() { public String toString() { return "PodAnnotationFilter(" + annotationSelector + ")"; } public boolean matches(Pod entity) { return filterLabels(annotationSelector, entity.getMetadata().getAnnotations()); } }; } } /** * Creates a filter on a service using the given text string */ public static Filter<Service> createServiceFilter(final String textFilter) { if (isNullOrBlank(textFilter)) { return Filters.<Service>trueFilter(); } else { return new Filter<Service>() { public String toString() { return "ServiceFilter(" + textFilter + ")"; } public boolean matches(Service entity) { return filterMatchesIdOrLabels(textFilter, getName(entity), entity.getMetadata().getLabels()); } }; } } /** * Creates a filter on a service if it matches the given namespace */ public static Filter<Service> createNamespaceServiceFilter(final String namespace) { if (isNullOrBlank(namespace)) { return Filters.<Service>trueFilter(); } else { return new Filter<Service>() { public String toString() { return "NamespaceServiceFilter(" + namespace + ")"; } public boolean matches(Service entity) { return Objects.equal(namespace, getNamespace(entity.getMetadata())); } }; } } /** * Creates a filter on a service using the given text string */ public static Filter<Service> createServiceFilter(final Map<String, String> labelSelector) { if (labelSelector == null || labelSelector.isEmpty()) { return Filters.<Service>trueFilter(); } else { return new Filter<Service>() { public String toString() { return "ServiceFilter(" + labelSelector + ")"; } public boolean matches(Service entity) { return filterLabels(labelSelector, entity.getMetadata().getLabels()); } }; } } /** * Creates a filter on a replicationController using the given text string */ public static Filter<ReplicationController> 
createReplicationControllerFilter(final String textFilter) {
        if (isNullOrBlank(textFilter)) {
            return Filters.<ReplicationController>trueFilter();
        } else {
            return new Filter<ReplicationController>() {
                public String toString() {
                    return "ReplicationControllerFilter(" + textFilter + ")";
                }

                public boolean matches(ReplicationController entity) {
                    return filterMatchesIdOrLabels(textFilter, getName(entity), entity.getMetadata().getLabels());
                }
            };
        }
    }

    /**
     * Creates a filter on a replicationController using the given label selector;
     * a null or empty selector matches everything.
     */
    public static Filter<ReplicationController> createReplicationControllerFilter(final Map<String, String> labelSelector) {
        if (labelSelector == null || labelSelector.isEmpty()) {
            return Filters.<ReplicationController>trueFilter();
        } else {
            return new Filter<ReplicationController>() {
                public String toString() {
                    return "ReplicationControllerFilter(" + labelSelector + ")";
                }

                public boolean matches(ReplicationController entity) {
                    return filterLabels(labelSelector, entity.getMetadata().getLabels());
                }
            };
        }
    }

    /**
     * Returns true if the given textFilter matches either the id or the labels.
     * First tries a plain substring match against the id and against the
     * "k1=v1,k2=v2" string form of the labels; failing that, parses textFilter
     * as a label selector and requires every selector entry to match the
     * corresponding label value.
     */
    public static boolean filterMatchesIdOrLabels(String textFilter, String id, Map<String, String> labels) {
        String text = toLabelsString(labels);
        boolean result = (text != null && text.contains(textFilter)) || (id != null && id.contains(textFilter));
        if (!result) {
            //labels can be in different order to selector
            Map<String, String> selectorMap = toLabelsMap(textFilter);
            if (!selectorMap.isEmpty() && labels != null && !labels.isEmpty()) {
                result = true;
                for (Map.Entry<String, String> entry : selectorMap.entrySet()) {
                    String value = labels.get(entry.getKey());
                    // NOTE(review): String.matches() treats the selector value as a regular
                    // expression, not a literal - a value containing regex metacharacters
                    // (e.g. "1.0") matches more than intended, and an invalid pattern throws
                    // PatternSyntaxException. Confirm whether regex matching is intentional.
                    if (value == null || !value.matches(entry.getValue())) {
                        result = false;
                        break;
                    }
                }
            }
        }
        return result;
    }

    /**
     * Returns true if every entry in filterLabels is present with an equal value
     * in the actual labels; a null labels map never matches.
     */
    public static boolean filterLabels(Map<String, String> filterLabels, Map<String, String> labels) {
        if (labels == null) {
            return false;
        }
Set<Map.Entry<String, String>> entries = filterLabels.entrySet(); for (Map.Entry<String, String> entry : entries) { String key = entry.getKey(); String expectedValue = entry.getValue(); String actualValue = labels.get(key); if (!Objects.equal(expectedValue, actualValue)) { return false; } } return true; } /** * For positive non-zero values return the text of the number or return blank */ public static String toPositiveNonZeroText(Integer port) { if (port != null) { int value = port; if (value > 0) { return "" + value; } } return ""; } /** * Returns all the containers from the given pod */ @SuppressWarnings("unchecked") public static List<Container> getContainers(Pod pod) { if (pod != null) { PodSpec podSpec = pod.getSpec(); return getContainers(podSpec); } return Collections.EMPTY_LIST; } /** * Returns all the containers from the given Replication Controller */ @SuppressWarnings("unchecked") public static List<Container> getContainers(ReplicationController replicationController) { if (replicationController != null) { ReplicationControllerSpec replicationControllerSpec = replicationController.getSpec(); return getContainers(replicationControllerSpec); } return Collections.EMPTY_LIST; } /** * Returns all the containers from the given Replication Controller's replicationControllerSpec */ @SuppressWarnings("unchecked") public static List<Container> getContainers(ReplicationControllerSpec replicationControllerSpec) { if (replicationControllerSpec != null) { PodTemplateSpec podTemplateSpec = replicationControllerSpec.getTemplate(); return getContainers(podTemplateSpec); } return Collections.EMPTY_LIST; } @SuppressWarnings("unchecked") public static List<Container> getContainers(PodSpec podSpec) { if (podSpec != null) { return podSpec.getContainers(); } return Collections.EMPTY_LIST; } @SuppressWarnings("unchecked") public static List<Container> getContainers(PodTemplateSpec podTemplateSpec) { if (podTemplateSpec != null) { return getContainers(podTemplateSpec.getSpec()); 
}
        return Collections.EMPTY_LIST;
    }

    /**
     * Returns all the containers from the given Replication Controller.
     * Currently not implemented; always returns an empty list (original TODO kept).
     */
    @SuppressWarnings("unchecked")
    public static List<Container> getCurrentContainers(ReplicationController replicationController) {
        if (replicationController != null) {
            // TODO
        }
        return Collections.EMPTY_LIST;
    }

    /**
     * Returns all the current containers from the given pod's status, indexed
     * by container ID. Returns an empty map if the pod or its status is null.
     */
    @SuppressWarnings("unchecked")
    public static Map<String, ContainerStatus> getCurrentContainers(Pod pod) {
        if (pod != null) {
            PodStatus currentStatus = pod.getStatus();
            return getCurrentContainers(currentStatus);
        }
        return Collections.EMPTY_MAP;
    }

    /**
     * Returns all the current containers from the given podStatus, indexed by
     * container ID. Returns an empty map if the status is null.
     */
    @SuppressWarnings("unchecked")
    public static Map<String, ContainerStatus> getCurrentContainers(PodStatus podStatus) {
        if (podStatus != null) {
            List<ContainerStatus> containerStatuses = podStatus.getContainerStatuses();
            // Use LinkedHashMap instead of the legacy synchronized Hashtable: callers only
            // see the Map interface, insertion order is preserved, and a null containerID
            // (which Hashtable rejects with NullPointerException) is tolerated.
            // NOTE(review): containerID looks like it may be null while a container is
            // still pending - confirm against the Kubernetes API.
            Map<String, ContainerStatus> info = new LinkedHashMap<>(containerStatuses.size());
            for (ContainerStatus status : containerStatuses) {
                info.put(status.getContainerID(), status);
            }
            return info;
        }
        return Collections.EMPTY_MAP;
    }

    /**
     * Returns the host IP of the pod, or null if the pod or its status is null.
     */
    public static String getHost(Pod pod) {
        if (pod != null) {
            PodStatus currentState = pod.getStatus();
            if (currentState != null) {
                return currentState.getHostIP();
            }
        }
        return null;
    }

    /**
     * Returns the container (target) port numbers for the given service.
     *
     * @throws IllegalArgumentException if a target port is blank
     * @throws IllegalStateException    if a named target port is not a valid integer
     */
    @SuppressWarnings("unchecked")
    public static Set<Integer> getContainerPorts(Service service) {
        Set<Integer> answer = Collections.EMPTY_SET;
        String id = getName(service);
        ServiceSpec spec = service.getSpec();
        if (spec != null) {
            List<ServicePort> servicePorts = spec.getPorts();
            Objects.notNull(servicePorts, "servicePorts for service " + id);
            answer = new HashSet<>(servicePorts.size());
            String message = "service " + id;
            for (ServicePort port : servicePorts) {
                IntOrString intOrStringValue = port.getTargetPort();
                Integer intValue =
intOrStringToInteger(intOrStringValue, message); if (intValue != null) { answer.add(intValue); } } } return answer; } /** * Returns the IntOrString converted to an Integer value or throws an exception with the given message */ public static Integer intOrStringToInteger(IntOrString intOrStringValue, String message) { Integer intValue = intOrStringValue.getIntVal(); if (intValue == null) { String containerPortText = intOrStringValue.getStrVal(); if (Strings.isNullOrBlank(containerPortText)) { throw new IllegalArgumentException("No port for " + message); } try { intValue = Integer.parseInt(containerPortText); } catch (NumberFormatException e) { throw new IllegalStateException("Invalid servicePorts expression " + containerPortText + " for " + message + ". " + e, e); } } return intValue; } /** * Returns the container port number for the given service */ @SuppressWarnings("unchecked") public static Set<String> getContainerPortsStrings(Service service) { Set<String> answer = Collections.EMPTY_SET; String id = getName(service); ServiceSpec spec = service.getSpec(); if (spec != null) { List<ServicePort> servicePorts = spec.getPorts(); Objects.notNull(servicePorts, "servicePorts for service " + id); answer = new HashSet<>(servicePorts.size()); for (ServicePort port : servicePorts) { IntOrString intOrStringValue = port.getTargetPort(); Integer intValue = intOrStringValue.getIntVal(); if (intValue != null) { answer.add(intValue.toString()); } else { String containerPortText = intOrStringValue.getStrVal(); if (Strings.isNullOrBlank(containerPortText)) { throw new IllegalArgumentException("No servicePorts for service " + id); } answer.add(containerPortText); } } } return answer; } /** * Combines the JSON objects into a config object */ public static Object combineJson(Object... 
objects) throws IOException { KubernetesList list = findOrCreateList(objects); List<HasMetadata> items = list.getItems(); if (items == null) { items = new ArrayList<>(); list.setItems(items); } for (Object object : objects) { if (object != list) { addObjectsToItemArray(items, object); } } moveServicesToFrontOfArray(items); removeDuplicates(items); Object answer = Templates.combineTemplates(list, items); items = toItemList(answer); removeDuplicates(items); return answer; } /** * Lets move all Service resources before any other to avoid ordering issues creating things */ public static void moveServicesToFrontOfArray(List<HasMetadata> list) { int size = list.size(); int lastNonService = -1; for (int i = 0; i < size; i++) { HasMetadata item = list.get(i); if (item instanceof Service) { if (lastNonService >= 0) { HasMetadata nonService = list.get(lastNonService); list.set(i, nonService); list.set(lastNonService, item); lastNonService++; } } else if (lastNonService < 0) { lastNonService = i; } } } /** * Remove any duplicate resources using the kind and id */ protected static void removeDuplicates(List<HasMetadata> itemArray) { int size = itemArray.size(); int lastNonService = -1; Set<String> keys = new HashSet<>(); for (int i = 0; i < size; i++) { HasMetadata item = itemArray.get(i); if (item == null) { itemArray.remove(i); i--; size--; } else { String id = getObjectId(item); String kind = item.getClass().getSimpleName(); if (Strings.isNotBlank(id)) { String key = kind + ":" + id; if (!keys.add(key)) { // lets remove this one itemArray.remove(i); i--; size--; } } } } } @SuppressWarnings("unchecked") protected static void addObjectsToItemArray(List destinationList, Object object) throws IOException { if (object instanceof KubernetesList) { KubernetesList kubernetesList = (KubernetesList) object; List<HasMetadata> items = kubernetesList.getItems(); if (items != null) { destinationList.addAll(items); } } else if (object instanceof Collection) { Collection collection = 
(Collection) object; destinationList.addAll(collection); } else { destinationList.add(object); } } protected static KubernetesList findOrCreateList(Object[] objects) { KubernetesList list = null; for (Object object : objects) { if (object instanceof KubernetesList) { list = (KubernetesList) object; break; } } if (list == null) { list = new KubernetesList(); } return list; } /** * Returns the URL to access the service; using the service portalIP and port */ public static String getServiceURL(Service service) { if (service != null) { ServiceSpec spec = service.getSpec(); if (spec != null) { String portalIP = spec.getClusterIP(); if (portalIP != null) { Integer port = spec.getPorts().iterator().next().getPort(); if (port != null && port > 0) { portalIP += ":" + port; } String protocol = "http://"; if (KubernetesHelper.isServiceSsl(spec.getClusterIP(), port, Utils.getSystemPropertyOrEnvVar(io.fabric8.kubernetes.client.Config.KUBERNETES_TRUST_CERT_SYSTEM_PROPERTY, false))) { protocol = "https://"; } return protocol + portalIP; } } } return null; } /** * Returns the URL to access the service; using the environment variables, routes * or service portalIP address * * @throws IllegalArgumentException if the URL cannot be found for the serviceName and namespace */ public static String getServiceURL(KubernetesClient client, String serviceName, String serviceNamespace, String serviceProtocol, boolean serviceExternal) { return getServiceURL(client, serviceName, serviceNamespace, serviceProtocol, null, serviceExternal); } /** * Returns the URL to access the service; using the environment variables, routes * or service portalIP address * * @throws IllegalArgumentException if the URL cannot be found for the serviceName and namespace */ public static String getServiceURL(KubernetesClient client, String serviceName, String serviceNamespace, String serviceProtocol, String servicePortName, boolean serviceExternal) { Service srv = null; String serviceHost = serviceToHost(serviceName); 
String servicePort = serviceToPort(serviceName, servicePortName);
        String serviceProto = serviceProtocol != null ? serviceProtocol : serviceToProtocol(serviceName, servicePort);

        //1. Inside Kubernetes: Services as ENV vars
        if (!serviceExternal && Strings.isNotBlank(serviceHost) && Strings.isNotBlank(servicePort) && Strings.isNotBlank(serviceProtocol)) {
            return serviceProtocol + "://" + serviceHost + ":" + servicePort;
            //2. Anywhere: When namespace is passed System / Env var. Mostly needed for integration tests.
        } else if (Strings.isNotBlank(serviceNamespace)) {
            srv = client.services().inNamespace(serviceNamespace).withName(serviceName).get();
        } else {
            //3. No namespace given: scan every service visible to the client for a name match.
            for (Service s : client.services().list().getItems()) {
                String sid = getName(s);
                if (serviceName.equals(sid)) {
                    srv = s;
                    break;
                }
            }
        }
        if (srv == null) {
            throw new IllegalArgumentException("No kubernetes service could be found for name: " + serviceName + " in namespace: " + serviceNamespace);
        }
        // On OpenShift, prefer an externally reachable route when no explicit port name was requested.
        if (Strings.isNullOrBlank(servicePortName) && isOpenShift(client)) {
            OpenShiftClient openShiftClient = client.adapt(OpenShiftClient.class);
            RouteList routeList = openShiftClient.routes().inNamespace(serviceNamespace).list();
            for (Route route : routeList.getItems()) {
                if (route.getSpec().getTo().getName().equals(serviceName)) {
                    return (serviceProto + "://" + route.getSpec().getHost()).toLowerCase();
                }
            }
        }
        // Fall back to the service's cluster address and the named (or first) port.
        ServicePort port = findServicePortByName(srv, servicePortName);
        if (port == null) {
            throw new RuntimeException("Couldn't find port: " + servicePortName + " for service:" + serviceName);
        }
        return (serviceProto + "://" + srv.getSpec().getPortalIP() + ":" + port.getPort()).toLowerCase();
    }

    /**
     * Returns the URL to access the service; using the environment variables, routes
     * or service portalIP address.
     *
     * @throws IllegalArgumentException if the URL cannot be found for the serviceName and namespace
     */
    public static String getServiceURLInCurrentNamespace(KubernetesClient client, String serviceName, String serviceProtocol, String
servicePortName, boolean serviceExternal) {
        Service srv = null;
        // Resolve host/port/protocol from the Kubernetes-injected env vars / system properties.
        String serviceHost = serviceToHost(serviceName);
        String servicePort = serviceToPort(serviceName, servicePortName);
        String serviceProto = serviceProtocol != null ? serviceProtocol : serviceToProtocol(serviceName, servicePort);

        //1. Inside Kubernetes: Services as ENV vars
        if (!serviceExternal && Strings.isNotBlank(serviceHost) && Strings.isNotBlank(servicePort) && Strings.isNotBlank(serviceProtocol)) {
            return serviceProtocol + "://" + serviceHost + ":" + servicePort;
            //2. Anywhere: When namespace is passed System / Env var. Mostly needed for integration tests.
        } else {
            // Uses the client's current namespace.
            srv = client.services().withName(serviceName).get();
        }
        if (srv == null) {
            throw new IllegalArgumentException("No kubernetes service could be found for name: " + serviceName);
        }
        // On OpenShift, prefer an externally reachable route when no explicit port name was requested.
        if (Strings.isNullOrBlank(servicePortName) && isOpenShift(client)) {
            OpenShiftClient openShiftClient = client.adapt(OpenShiftClient.class);
            RouteList routeList = openShiftClient.routes().list();
            for (Route route : routeList.getItems()) {
                if (route.getSpec().getTo().getName().equals(serviceName)) {
                    return (serviceProto + "://" + route.getSpec().getHost()).toLowerCase();
                }
            }
        }
        // Fall back to the service's cluster address and the named (or first) port.
        ServicePort port = findServicePortByName(srv, servicePortName);
        if (port == null) {
            throw new RuntimeException("Couldn't find port: " + servicePortName + " for service:" + serviceName);
        }
        return (serviceProto + "://" + srv.getSpec().getPortalIP() + ":" + port.getPort()).toLowerCase();
    }

    /**
     * Returns the host for the given service id, read from the
     * &lt;ID&gt;_SERVICE_HOST environment variable or system property ("" when unset).
     */
    public static String serviceToHost(String id) {
        return Systems.getEnvVarOrSystemProperty(toEnvVariable(id + HOST_SUFFIX), "");
    }

    /**
     * Returns the default port for the given service id from the environment.
     */
    public static String serviceToPort(String serviceId) {
        return serviceToPort(serviceId, null);
    }

    /**
     * Returns the port for the given service id and optional port name, read from the
     * &lt;ID&gt;_SERVICE_PORT[_&lt;NAME&gt;] environment variable or system property ("" when unset).
     */
    public static String serviceToPort(String serviceId, String portName) {
        String name = serviceId + PORT_SUFFIX + (Strings.isNotBlank(portName) ?
"_" + portName : ""); return Systems.getEnvVarOrSystemProperty(toEnvVariable(name), ""); } public static String serviceToProtocol(String id, String servicePort) { return Systems.getEnvVarOrSystemProperty(toEnvVariable(id + PORT_SUFFIX + "_" + servicePort + PROTO_SUFFIX), DEFAULT_PROTO); } public static String toEnvVariable(String str) { return str.toUpperCase().replaceAll("-", "_"); } /** * Returns the port for the given port number on the pod */ public static ContainerPort findContainerPort(Pod pod, Integer portNumber) { List<Container> containers = KubernetesHelper.getContainers(pod); for (Container container : containers) { List<ContainerPort> ports = container.getPorts(); for (ContainerPort port : ports) { if (Objects.equal(portNumber, port.getContainerPort())) { return port; } } } return null; } public static ServicePort findServicePortByName(Service service, String portName) { if (Strings.isNullOrBlank(portName)) { return service.getSpec().getPorts().iterator().next(); } for (ServicePort servicePort : service.getSpec().getPorts()) { if (servicePort.getName().equals(portName)) { return servicePort; } } return null; } /** * Returns the port for the given port name */ public static ContainerPort findContainerPortByName(Pod pod, String name) { List<Container> containers = KubernetesHelper.getContainers(pod); for (Container container : containers) { List<ContainerPort> ports = container.getPorts(); for (ContainerPort port : ports) { if (Objects.equal(name, port.getName())) { return port; } } } return null; } /** * Returns the port for the given port number or name */ public static ContainerPort findContainerPortByNumberOrName(Pod pod, String numberOrName) { Integer portNumber = toOptionalNumber(numberOrName); if (portNumber != null) { return findContainerPort(pod, portNumber); } else { return findContainerPortByName(pod, numberOrName); } } /** * Returns the number if it can be parsed or null */ protected static Integer toOptionalNumber(String text) { if 
(Strings.isNotBlank(text)) {
            try {
                return Integer.parseInt(text);
            } catch (NumberFormatException e) {
                // ignore parse errors
            }
        }
        return null;
    }

    /**
     * Maps the pod's status phase text onto the coarse OK / WAIT / ERROR triage:
     * phases starting with "run" are OK, "wait" (or no phase yet) are WAIT,
     * anything else is ERROR.
     */
    public static PodStatusType getPodStatus(Pod pod) {
        String text = getPodStatusText(pod);
        if (Strings.isNotBlank(text)) {
            text = text.toLowerCase();
            if (text.startsWith("run")) {
                return PodStatusType.OK;
            } else if (text.startsWith("wait")) {
                return PodStatusType.WAIT;
            } else {
                return PodStatusType.ERROR;
            }
        }
        return PodStatusType.WAIT;
    }

    /**
     * Returns true if the pod is running
     */
    public static boolean isPodRunning(Pod pod) {
        PodStatusType status = getPodStatus(pod);
        return Objects.equal(status, PodStatusType.OK);
    }

    /**
     * Returns the pod's status phase text, or null when the pod or its status is missing.
     */
    public static String getPodStatusText(Pod pod) {
        if (pod != null) {
            PodStatus podStatus = pod.getStatus();
            if (podStatus != null) {
                return podStatus.getPhase();
            }
        }
        return null;
    }

    /**
     * Returns the environment variable value for the first container which has a value for it in the pod
     */
    public static String getPodEnvVar(Pod pod, String envVarName) {
        if (pod != null) {
            PodSpec spec = pod.getSpec();
            if (spec != null) {
                List<Container> containers = spec.getContainers();
                if (containers != null) {
                    for (Container container : containers) {
                        String answer = getContainerEnvVar(container, envVarName);
                        if (Strings.isNotBlank(answer)) {
                            return answer;
                        }
                    }
                }
            }
        }
        return null;
    }

    /**
     * Returns the environment variable value for the given container and name,
     * or null when the container does not define it.
     */
    public static String getContainerEnvVar(Container container, String envVarName) {
        if (container != null) {
            List<EnvVar> env = container.getEnv();
            if (env != null) {
                for (EnvVar envVar : env) {
                    if (Objects.equal(envVarName, envVar.getName())) {
                        return envVar.getValue();
                    }
                }
            }
        }
        return null;
    }

    /**
     * Returns the pods for the given replication controller
     */
    @SuppressWarnings("unchecked")
    public static List<Pod> getPodsForReplicationController(ReplicationController replicationController, Iterable<Pod> pods) {
        ReplicationControllerSpec replicationControllerSpec =
replicationController.getSpec();
        if (replicationControllerSpec == null) {
            LOG.warn("Cannot instantiate replication controller: " + getName(replicationController) + " due to missing ReplicationController.Spec!");
        } else {
            // Pods belong to the controller when their labels match its selector.
            Map<String, String> replicaSelector = replicationControllerSpec.getSelector();
            Filter<Pod> podFilter = KubernetesHelper.createPodFilter(replicaSelector);
            return Filters.filter(pods, podFilter);
        }
        return Collections.EMPTY_LIST;
    }

    /**
     * Returns the pods for the given service, i.e. those whose labels match the
     * service's selector.
     */
    public static List<Pod> getPodsForService(Service service, Iterable<Pod> pods) {
        Map<String, String> selector = getSelector(service);
        Filter<Pod> podFilter = KubernetesHelper.createPodFilter(selector);
        return Filters.filter(pods, podFilter);
    }

    /**
     * Looks up the service endpoints in DNS.
     * <p/>
     * Endpoints are registered as SRV records in DNS so this method returns
     * endpoints in the format "host:port". This is a list as SRV records are ordered
     * by priority & weight before being returned to the client.
* <p/> * See https://github.com/GoogleCloudPlatform/kubernetes/blob/master/cluster/addons/dns/README.md */ public static List<String> lookupServiceEndpointsInDns(String serviceName) throws IllegalArgumentException, UnknownHostException { try { Lookup l = new Lookup(serviceName, Type.SRV); Record[] records = l.run(); if (l.getResult() == Lookup.SUCCESSFUL) { SRVRecord[] srvRecords = Arrays.copyOf(records, records.length, SRVRecord[].class); Arrays.sort(srvRecords, new Comparator<SRVRecord>() { @Override public int compare(SRVRecord a, SRVRecord b) { int ret = Integer.compare(b.getPriority(), a.getPriority()); if (ret == 0) { ret = Integer.compare(b.getWeight(), a.getWeight()); } return ret; } }); List<String> endpointAddresses = new ArrayList<>(srvRecords.length); for (SRVRecord srvRecord : srvRecords) { endpointAddresses.add(srvRecord.getTarget().toString(true).concat(":").concat(String.valueOf(srvRecord.getPort()))); } return endpointAddresses; } else { LOG.warn("Lookup {} result: {}", serviceName, l.getErrorString()); } } catch (TextParseException e) { LOG.error("Unparseable service name: {}", serviceName, e); } catch (ClassCastException e) { LOG.error("Invalid response from DNS server - should have been A records", e); } return Collections.EMPTY_LIST; } /** * Looks up the service in DNS. * If this is a headless service, this call returns the endpoint IPs from DNS. * If this is a non-headless service, this call returns the service IP only. 
* <p/> * See https://github.com/GoogleCloudPlatform/kubernetes/blob/master/docs/services.md#headless-services */ public static Set<String> lookupServiceInDns(String serviceName) throws IllegalArgumentException, UnknownHostException { try { Lookup l = new Lookup(serviceName); Record[] records = l.run(); if (l.getResult() == Lookup.SUCCESSFUL) { Set<String> endpointAddresses = new HashSet<>(records.length); for (int i = 0; i < records.length; i++) { ARecord aRecord = (ARecord) records[i]; endpointAddresses.add(aRecord.getAddress().getHostAddress()); } return endpointAddresses; } else { LOG.warn("Lookup {} result: {}", serviceName, l.getErrorString()); } } catch (TextParseException e) { LOG.error("Unparseable service name: {}", serviceName, e); } catch (ClassCastException e) { LOG.error("Invalid response from DNS server - should have been A records", e); } return Collections.EMPTY_SET; } public static boolean isServiceSsl(String host, int port, boolean trustAllCerts) { try { LOG.info("Checking if a service is SSL on " + host + ":" + port); SSLSocketFactory sslsocketfactory; if (trustAllCerts) { sslsocketfactory = TrustEverythingSSLTrustManager.getTrustingSSLSocketFactory(); } else { sslsocketfactory = (SSLSocketFactory) SSLSocketFactory.getDefault(); } Socket socket = sslsocketfactory.createSocket(); // Connect, with an explicit timeout value socket.connect(new InetSocketAddress(host, port), 1 * 1000); try { InputStream in = socket.getInputStream(); OutputStream out = socket.getOutputStream(); // Write a test byte to get a reaction :) out.write(1); while (in.available() > 0) { System.out.print(in.read()); } return true; } finally { LOG.info("Checked if a service is SSL on " + host + ":" + port); socket.close(); } } catch (SSLHandshakeException e) { LOG.error("SSL handshake failed - this probably means that you need to trust the kubernetes root SSL certificate or set the environment variable " + 
Utils.convertSystemPropertyNameToEnvVar(io.fabric8.kubernetes.client.Config.KUBERNETES_TRUST_CERT_SYSTEM_PROPERTY), e); } catch (SSLProtocolException e) { LOG.error("SSL protocol error", e); } catch (SSLKeyException e) { LOG.error("Bad SSL key", e); } catch (SSLPeerUnverifiedException e) { LOG.error("Could not verify server", e); } catch (SSLException e) { LOG.debug("Address does not appear to be SSL-enabled - falling back to http", e); } catch (IOException e) { LOG.debug("Failed to validate service", e); } return false; } /** * Validates that the given value is valid according to the kubernetes ID parsing rules, throwing an exception if not. */ public static String validateKubernetesId(String currentValue, String description) throws IllegalArgumentException { if (isNullOrBlank(currentValue)) { throw new IllegalArgumentException("No " + description + " is specified!"); } int size = currentValue.length(); for (int i = 0; i < size; i++) { char ch = currentValue.charAt(i); if (Character.isUpperCase(ch)) { throw new IllegalArgumentException("Invalid upper case letter '" + ch + "' at index " + i + " for " + description + " value: " + currentValue); } } return currentValue; } public static Date parseDate(String text) { try { return dateTimeFormat.parse(text); } catch (ParseException e) { LOG.warn("Failed to parse date: " + text + ". 
Reason: " + e); return null; } } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(Object object) { if (object instanceof Route) { return summaryText((Route) object); } else if (object instanceof Service) { return summaryText((Service) object); } else if (object instanceof ReplicationController) { return summaryText((ReplicationController) object); } else if (object instanceof Pod) { return summaryText((Pod) object); } else if (object instanceof Template) { return summaryText((Template) object); } else if (object instanceof DeploymentConfig) { return summaryText((DeploymentConfig) object); } else if (object instanceof OAuthClient) { return summaryText((OAuthClient) object); } else if (object instanceof String) { return object.toString(); } return ""; } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(Route entity) { RouteSpec spec = entity.getSpec(); if (spec == null) { return "No spec!"; } return "host: " + spec.getHost(); } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(ContainerState entity) { ContainerStateRunning running = entity.getRunning(); if (running != null) { return "Running"; } ContainerStateWaiting waiting = entity.getWaiting(); if (waiting != null) { return "Waiting"; } ContainerStateTerminated termination = entity.getTerminated(); if (termination != null) { return "Terminated"; } return "Unknown"; } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(Template entity) { StringBuilder buffer = new StringBuilder(); List<Parameter> parameters = entity.getParameters(); if (parameters != null) { for (Parameter parameter : parameters) { String name = parameter.getName(); appendText(buffer, name); } } return "parameters: " + buffer; } /** * Returns a short summary text message for the given kubernetes 
resource */
    public static String summaryText(OAuthClient entity) {
        return "redirectURIs: " + entity.getRedirectURIs();
    }

    /**
     * Returns a short summary text message for the given kubernetes resource:
     * the service's selector and its comma-separated port numbers.
     */
    public static String summaryText(Service entity) {
        StringBuilder portText = new StringBuilder();
        ServiceSpec spec = entity.getSpec();
        if (spec == null) {
            return "No spec";
        } else {
            List<ServicePort> ports = spec.getPorts();
            if (ports != null) {
                for (ServicePort port : ports) {
                    Integer number = port.getPort();
                    if (number != null) {
                        if (portText.length() > 0) {
                            portText.append(", ");
                        }
                        portText.append("").append(number);
                    }
                }
            }
            return "selector: " + spec.getSelector() + " ports: " + portText;
        }
    }

    /**
     * Returns a short summary text message for the given kubernetes resource:
     * the replica count plus the template's container images.
     */
    public static String summaryText(ReplicationController entity) {
        StringBuilder buffer = new StringBuilder();
        ReplicationControllerSpec spec = entity.getSpec();
        if (spec != null) {
            buffer.append("replicas: ").append(spec.getReplicas());
            PodTemplateSpec podTemplateSpec = spec.getTemplate();
            if (podTemplateSpec != null) {
                appendSummaryText(buffer, podTemplateSpec);
            }
        }
        return buffer.toString();
    }

    /**
     * Returns a short summary text message for the given kubernetes resource:
     * the replica count plus the template's container images.
     */
    public static String summaryText(DeploymentConfig entity) {
        StringBuilder buffer = new StringBuilder();
        DeploymentConfigSpec spec = entity.getSpec();
        if (spec != null) {
            buffer.append("replicas: " + spec.getReplicas());
            PodTemplateSpec podTemplateSpec = spec.getTemplate();
            if (podTemplateSpec != null) {
                appendSummaryText(buffer, podTemplateSpec);
            }
        }
        return buffer.toString();
    }

    /**
     * Returns a short summary text message for the given kubernetes resource:
     * the pod's container images.
     */
    public static String summaryText(Pod entity) {
        StringBuilder buffer = new StringBuilder();
        PodSpec podSpec = entity.getSpec();
        appendSummaryText(buffer, podSpec);
        return buffer.toString();
    }

    protected static void appendSummaryText(StringBuilder buffer, PodTemplateSpec
podTemplateSpec) {
        if (podTemplateSpec != null) {
            appendSummaryText(buffer, podTemplateSpec.getSpec());
        }
    }

    // Appends "image: <image>" for every container in the pod spec.
    protected static void appendSummaryText(StringBuilder buffer, PodSpec podSpec) {
        if (podSpec != null) {
            List<Container> containers = podSpec.getContainers();
            if (containers != null) {
                for (Container container : containers) {
                    String image = container.getImage();
                    appendText(buffer, "image: " + image);
                }
            }
        }
    }

    // Appends text to the buffer, comma-separated after the first entry.
    protected static void appendText(StringBuilder buffer, String text) {
        if (buffer.length() > 0) {
            buffer.append(", ");
        }
        buffer.append(text);
    }

    /**
     * Creates an IntOrString from the given int value
     */
    public static IntOrString createIntOrString(int intVal) {
        IntOrString answer = new IntOrString();
        answer.setIntVal(intVal);
        answer.setKind(INTORSTRING_KIND_INT);
        return answer;
    }

    /**
     * Creates an IntOrString from the given string which could be a number or a name
     */
    public static IntOrString createIntOrString(String nameOrNumber) {
        if (isNullOrBlank(nameOrNumber)) {
            return null;
        } else {
            IntOrString answer = new IntOrString();
            Integer intVal = null;
            try {
                intVal = Integer.parseInt(nameOrNumber);
            } catch (Exception e) {
                // ignore invalid number
            }
            if (intVal != null) {
                answer.setIntVal(intVal);
                answer.setKind(INTORSTRING_KIND_INT);
            } else {
                answer.setStrVal(nameOrNumber);
                answer.setKind(INTORSTRING_KIND_STRING);
            }
            return answer;
        }
    }

    /**
     * Returns a human readable status for the pod: the single container state
     * summary when there is exactly one container, otherwise the list of summaries.
     */
    public static String getStatusText(PodStatus podStatus) {
        String status;
        List<String> statusList = new ArrayList<>();
        List<ContainerStatus> containerStatuses = podStatus.getContainerStatuses();
        for (ContainerStatus containerStatus : containerStatuses) {
            ContainerState state = containerStatus.getState();
            String statusText = summaryText(state);
            if (statusText != null) {
                statusList.add(statusText);
            }
        }
        if (statusList.size() == 1) {
            status = statusList.get(0);
        } else {
            status = statusList.toString();
        }
        return status;
    }

    public static Secret validateSecretExists(KubernetesClient kubernetes, String namespace,
String secretName) {
        Secret secret = null;
        try {
            secret = kubernetes.secrets().inNamespace(namespace).withName(secretName).get();
        } catch (KubernetesClientException e) {
            if (e.getCode() == 404 || e.getCode() == 403) {
                // does not exist or namespace does not exist - fall through and
                // report via the IllegalArgumentException below
            } else {
                throw e;
            }
        }
        if (secret == null) {
            throw new IllegalArgumentException("No secret named: " + secretName +
                    " for namespace " + namespace + " is available on Kubernetes" +
                    ". For how to create secrets see: http://fabric8.io/guide/fabric8OnOpenShift.html#requirements ");
        } else {
            return secret;
        }
    }

    /**
     * Converts the DTO loaded from JSON to a {@link KubernetesList} assuming its not a {@link Template}
     */
    public static KubernetesList asKubernetesList(Object dto) throws IOException {
        if (dto instanceof KubernetesList) {
            return (KubernetesList) dto;
        } else {
            KubernetesList answer = new KubernetesList();
            List<HasMetadata> items = toItemList(dto);
            answer.setItems(items);
            return answer;
        }
    }

    /**
     * Returns true if this object is a pure kubernetes DTO, i.e. its class lives
     * in the io.fabric8.kubernetes package tree (as opposed to OpenShift types).
     */
    public static boolean isPureKubernetes(HasMetadata item) {
        if (item != null) {
            String name = item.getClass().getName();
            return name.startsWith("io.fabric8.kubernetes");
        }
        return false;
    }

    /**
     * Returns true when the cluster behind the client's master URL is OpenShift,
     * detected by the presence of an "/oapi" root path; results are cached per URL.
     */
    public static boolean isOpenShift(KubernetesClient client) {
        URL masterUrl = client.getMasterUrl();
        if (IS_OPENSHIFT.containsKey(masterUrl)) {
            return IS_OPENSHIFT.get(masterUrl);
        } else {
            RootPaths rootPaths = client.rootPaths();
            if (rootPaths != null) {
                List<String> paths = rootPaths.getPaths();
                if (paths != null) {
                    for (String path : paths) {
                        if (java.util.Objects.equals("/oapi", path) || java.util.Objects.equals("oapi", path)) {
                            IS_OPENSHIFT.putIfAbsent(masterUrl, true);
                            return true;
                        }
                    }
                }
            }
        }
        IS_OPENSHIFT.putIfAbsent(masterUrl, false);
        return false;
    }
}
components/kubernetes-api/src/main/java/io/fabric8/kubernetes/api/KubernetesHelper.java
/** * Copyright 2005-2015 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.kubernetes.api; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import io.fabric8.kubernetes.api.extensions.Templates; import io.fabric8.kubernetes.api.model.Container; import io.fabric8.kubernetes.api.model.ContainerPort; import io.fabric8.kubernetes.api.model.ContainerState; import io.fabric8.kubernetes.api.model.ContainerStateRunning; import io.fabric8.kubernetes.api.model.ContainerStateTerminated; import io.fabric8.kubernetes.api.model.ContainerStateWaiting; import io.fabric8.kubernetes.api.model.ContainerStatus; import io.fabric8.kubernetes.api.model.EnvVar; import io.fabric8.kubernetes.api.model.HasMetadata; import io.fabric8.kubernetes.api.model.IntOrString; import io.fabric8.kubernetes.api.model.KubernetesList; import io.fabric8.kubernetes.api.model.KubernetesResource; import io.fabric8.kubernetes.api.model.ObjectMeta; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.PodList; import io.fabric8.kubernetes.api.model.PodSpec; import io.fabric8.kubernetes.api.model.PodStatus; import io.fabric8.kubernetes.api.model.PodTemplateSpec; import io.fabric8.kubernetes.api.model.ReplicationController; import io.fabric8.kubernetes.api.model.ReplicationControllerList; import 
io.fabric8.kubernetes.api.model.ReplicationControllerSpec; import io.fabric8.kubernetes.api.model.RootPaths; import io.fabric8.kubernetes.api.model.Secret; import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.ServiceList; import io.fabric8.kubernetes.api.model.ServicePort; import io.fabric8.kubernetes.api.model.ServiceSpec; import io.fabric8.kubernetes.client.Config; import io.fabric8.kubernetes.client.ConfigBuilder; import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.kubernetes.client.KubernetesClientException; import io.fabric8.kubernetes.client.internal.Utils; import io.fabric8.openshift.api.model.DeploymentConfig; import io.fabric8.openshift.api.model.DeploymentConfigSpec; import io.fabric8.openshift.api.model.OAuthClient; import io.fabric8.openshift.api.model.Parameter; import io.fabric8.openshift.api.model.Route; import io.fabric8.openshift.api.model.RouteList; import io.fabric8.openshift.api.model.RouteSpec; import io.fabric8.openshift.api.model.Template; import io.fabric8.openshift.client.OpenShiftClient; import io.fabric8.utils.Files; import io.fabric8.utils.Filter; import io.fabric8.utils.Filters; import io.fabric8.utils.Objects; import io.fabric8.utils.Strings; import io.fabric8.utils.Systems; import io.fabric8.utils.ssl.TrustEverythingSSLTrustManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xbill.DNS.ARecord; import org.xbill.DNS.Lookup; import org.xbill.DNS.Record; import org.xbill.DNS.SRVRecord; import org.xbill.DNS.TextParseException; import org.xbill.DNS.Type; import javax.net.ssl.SSLException; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLKeyException; import javax.net.ssl.SSLPeerUnverifiedException; import javax.net.ssl.SSLProtocolException; import javax.net.ssl.SSLSocketFactory; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.Socket; 
import java.net.URL; import java.net.UnknownHostException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import static io.fabric8.utils.Lists.notNullList; import static io.fabric8.utils.Strings.isNullOrBlank; /** * Kubernetes utility methods. */ public final class KubernetesHelper { public static final String KUBERNETES_NAMESPACE_SYSTEM_PROPERTY = "kubernetes.namespace"; public static final String KUBERNETES_NAMESPACE_ENV = "KUBERNETES_NAMESPACE"; public static final String DEFAULT_NAMESPACE = "default"; private static final transient Logger LOG = LoggerFactory.getLogger(KubernetesHelper.class); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); public static final int INTORSTRING_KIND_INT = 0; public static final int INTORSTRING_KIND_STRING = 1; public static final String DEFAULT_DOCKER_HOST = "tcp://localhost:2375"; protected static SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssX"); public static final String defaultApiVersion = "v1"; public static final String defaultOsApiVersion = "v1"; private static final String HOST_SUFFIX = "_SERVICE_HOST"; private static final String PORT_SUFFIX = "_SERVICE_PORT"; private static final String PROTO_SUFFIX = "_TCP_PROTO"; public static final String DEFAULT_PROTO = "tcp"; private static final ConcurrentMap<URL, Boolean> IS_OPENSHIFT = new ConcurrentHashMap<>(); private static final Config CONFIG = new ConfigBuilder().build(); public static String defaultNamespace() { return CONFIG.getNamespace(); } /** * Returns the ID of the 
given object */ public static String getObjectId(Object object) { if (object instanceof HasMetadata) { return getName((HasMetadata) object); } else { return object != null ? object.toString() : null; } } public static ObjectMeta getOrCreateMetadata(HasMetadata entity) { ObjectMeta metadata = entity.getMetadata(); if (metadata == null) { metadata = new ObjectMeta(); entity.setMetadata(metadata); } return metadata; } /** * Returns the resource version for the entity or null if it does not have one */ public static String getResourceVersion(HasMetadata entity) { if (entity != null) { ObjectMeta metadata = entity.getMetadata(); if (metadata != null) { String resourceVersion = metadata.getResourceVersion(); if (Strings.isNotBlank(resourceVersion)) { return resourceVersion; } } } return null; } /** * Returns true if this entity has a valid non blank resourceVersion in its metadata */ public static boolean hasResourceVersion(HasMetadata entity) { return getResourceVersion(entity) != null; } public static String getName(ObjectMeta entity) { if (entity != null) { return Strings.firstNonBlank(entity.getName(), getAdditionalPropertyText(entity.getAdditionalProperties(), "id"), entity.getUid()); } else { return null; } } /** * Returns the kind of the entity */ public static String getKind(HasMetadata entity) { if (entity != null) { // TODO use reflection to find the kind? 
if (entity instanceof KubernetesList) { return "List"; } else { return entity.getClass().getSimpleName(); } } else { return null; } } public static String getName(HasMetadata entity) { if (entity != null) { return getName(entity.getMetadata()); } else { return null; } } public static void setName(HasMetadata entity, String name) { getOrCreateMetadata(entity).setName(name); } public static void setName(HasMetadata entity, String namespace, String name) { ObjectMeta metadata = getOrCreateMetadata(entity); metadata.setNamespace(namespace); metadata.setName(name); } public static void setNamespace(HasMetadata entity, String namespace) { getOrCreateMetadata(entity).setNamespace(namespace); } public static String getNamespace(ObjectMeta entity) { if (entity != null) { return entity.getNamespace(); } else { return null; } } public static String getNamespace(HasMetadata entity) { if (entity != null) { return getNamespace(entity.getMetadata()); } else { return null; } } public static Map<String, String> getOrCreateAnnotations(HasMetadata entity) { ObjectMeta metadata = getOrCreateMetadata(entity); Map<String, String> answer = metadata.getAnnotations(); if (answer == null) { // use linked so the annotations can be in the FIFO order answer = new LinkedHashMap<>(); metadata.setAnnotations(answer); } return answer; } public static Map<String, String> getOrCreateLabels(HasMetadata entity) { ObjectMeta metadata = getOrCreateMetadata(entity); Map<String, String> answer = metadata.getLabels(); if (answer == null) { // use linked so the annotations can be in the FIFO order answer = new LinkedHashMap<>(); metadata.setLabels(answer); } return answer; } /** * Returns the labels of the given metadata object or an empty map if the metadata or labels are null */ @SuppressWarnings("unchecked") public static Map<String, String> getLabels(ObjectMeta metadata) { if (metadata != null) { Map<String, String> labels = metadata.getLabels(); if (labels != null) { return labels; } } return 
Collections.EMPTY_MAP; } @SuppressWarnings("unchecked") public static Map<String, String> getLabels(HasMetadata entity) { if (entity != null) { return getLabels(entity.getMetadata()); } return Collections.EMPTY_MAP; } public static ServiceSpec getOrCreateSpec(Service entity) { ServiceSpec spec = entity.getSpec(); if (spec == null) { spec = new ServiceSpec(); entity.setSpec(spec); } return spec; } public static String getPortalIP(Service entity) { String answer = null; if (entity != null) { ServiceSpec spec = getOrCreateSpec(entity); return spec.getClusterIP(); } return answer; } @SuppressWarnings("unchecked") public static Map<String, String> getSelector(Service entity) { Map<String, String> answer = null; if (entity != null) { ServiceSpec spec = getOrCreateSpec(entity); answer = spec.getSelector(); } return answer != null ? answer : Collections.EMPTY_MAP; } public static void setSelector(Service entity, Map<String, String> labels) { ServiceSpec spec = getOrCreateSpec(entity); spec.setSelector(labels); } public static Set<Integer> getPorts(Service entity) { Set<Integer> answer = new HashSet<>(); if (entity != null) { ServiceSpec spec = getOrCreateSpec(entity); for (ServicePort port : spec.getPorts()) { answer.add(port.getPort()); } } return answer; } protected static String getAdditionalPropertyText(Map<String, Object> additionalProperties, String name) { if (additionalProperties != null) { Object value = additionalProperties.get(name); if (value != null) { return value.toString(); } } return null; } protected static Map<String, Object> getMetadata(Map<String, Object> additionalProperties, boolean create) { Map<String, Object> answer = getAdditionalPropertyMap(additionalProperties, "metadata"); if (answer == null) { answer = new LinkedHashMap<>(); if (create) { additionalProperties.put("metadata", answer); } } return answer; } @SuppressWarnings("unchecked") protected static Map<String, Object> getAdditionalPropertyMap(Map<String, Object> additionalProperties, 
String name) { if (additionalProperties != null) { Object value = additionalProperties.get(name); if (value instanceof Map) { return (Map<String, Object>) value; } } return null; } public static String getDockerIp() { String url = resolveDockerHost(); int idx = url.indexOf("://"); if (idx > 0) { url = url.substring(idx + 3); } idx = url.indexOf(":"); if (idx > 0) { url = url.substring(0, idx); } return url; } public static String resolveDockerHost() { String dockerHost = System.getenv("DOCKER_HOST"); if (isNullOrBlank(dockerHost)) { dockerHost = System.getProperty("docker.host"); } if (isNullOrBlank(dockerHost)) { return DEFAULT_DOCKER_HOST; } else { return dockerHost; } } public static String toJson(Object dto) throws JsonProcessingException { Class<?> clazz = dto.getClass(); return OBJECT_MAPPER.writerFor(clazz).writeValueAsString(dto); } /** * Returns the given json data as a DTO such as * {@link Pod}, {@link ReplicationController} or * {@link io.fabric8.kubernetes.api.model.Service} * from the Kubernetes REST API */ public static Object loadJson(File file) throws IOException { byte[] data = Files.readBytes(file); return loadJson(data); } /** * Returns the given json data as a DTO such as * {@link Pod}, {@link ReplicationController} or * {@link io.fabric8.kubernetes.api.model.Service} * from the Kubernetes REST API */ public static Object loadJson(InputStream in) throws IOException { byte[] data = Files.readBytes(in); return loadJson(data); } public static Object loadJson(String json) throws IOException { byte[] data = json.getBytes(); return loadJson(data); } /** * Returns the given json data as a DTO such as * {@link Pod}, {@link ReplicationController} or * {@link io.fabric8.kubernetes.api.model.Service} * from the Kubernetes REST API */ public static Object loadJson(byte[] json) throws IOException { if (json != null && json.length > 0) { return OBJECT_MAPPER.readerFor(KubernetesResource.class).readValue(json); } return null; } /** * Loads the YAML file for 
the given DTO class */ public static <T> T loadYaml(InputStream in, Class<T> clazz) throws IOException { byte[] data = Files.readBytes(in); return loadYaml(data, clazz); } /** * Loads the YAML file for the given DTO class */ public static <T> T loadYaml(File file, Class<T> clazz) throws IOException { byte[] data = Files.readBytes(file); return loadYaml(data, clazz); } /** * Loads the YAML file for the given DTO class */ public static <T> T loadYaml(byte[] data, Class<T> clazz) throws IOException { ObjectMapper mapper = new ObjectMapper(new YAMLFactory()); return mapper.readValue(data, clazz); } /** * Loads the Kubernetes JSON and converts it to a list of entities */ @SuppressWarnings("unchecked") public static List<HasMetadata> toItemList(Object entity) throws IOException { if (entity instanceof List) { return (List<HasMetadata>) entity; } else if (entity instanceof HasMetadata[]) { HasMetadata[] array = (HasMetadata[]) entity; return Arrays.asList(array); } else if (entity instanceof KubernetesList) { KubernetesList config = (KubernetesList) entity; return config.getItems(); } else if (entity instanceof Template) { Template objects = (Template) entity; return objects.getObjects(); } else { List<HasMetadata> answer = new ArrayList<>(); if (entity instanceof HasMetadata) { answer.add((HasMetadata) entity); } return answer; } } /** * Saves the json object to the given file */ public static void saveJson(File json, Object object) throws IOException { OBJECT_MAPPER.writer().writeValue(json, object); } /** * Returns a map indexed by pod id of the pods */ public static Map<String, Pod> toPodMap(PodList podSchema) { return toFilteredPodMap(podSchema, Filters.<Pod>trueFilter()); } protected static Map<String, Pod> toFilteredPodMap(PodList podSchema, Filter<Pod> filter) { List<Pod> list = podSchema != null ? 
podSchema.getItems() : null; List<Pod> filteredList = Filters.filter(list, filter); return toPodMap(filteredList); } /** * Returns a map indexed by pod id of the pods */ public static Map<String, Pod> toPodMap(List<Pod> pods) { List<Pod> list = notNullList(pods); Map<String, Pod> answer = new HashMap<>(); for (Pod pod : list) { String id = getName(pod); if (Strings.isNotBlank(id)) { answer.put(id, pod); } } return answer; } /** * Returns a map indexed by service id of the services */ public static Map<String, Service> toServiceMap(ServiceList serviceSchema) { return toServiceMap(serviceSchema != null ? serviceSchema.getItems() : null); } /** * Returns a map indexed by service id of the services */ public static Map<String, Service> toServiceMap(List<Service> services) { List<Service> list = notNullList(services); Map<String, Service> answer = new HashMap<>(); for (Service service : list) { String id = getName(service); if (Strings.isNotBlank(id)) { answer.put(id, service); } } return answer; } public static Map<String, Service> toFilteredServiceMap(ServiceList serviceList, Filter<Service> filter) { List<Service> list = serviceList != null ? serviceList.getItems() : null; List<Service> filteredList = Filters.filter(list, filter); return toServiceMap(filteredList); } /** * Returns a map indexed by replicationController id of the replicationControllers */ public static Map<String, ReplicationController> toReplicationControllerMap(ReplicationControllerList replicationControllerSchema) { Filter<ReplicationController> filter = createReplicationControllerFilter((String) null); return toFilteredReplicationControllerMap(replicationControllerSchema, filter); } protected static Map<String, ReplicationController> toFilteredReplicationControllerMap(ReplicationControllerList replicationControllerSchema, Filter<ReplicationController> filter) { List<ReplicationController> list = replicationControllerSchema != null ? 
replicationControllerSchema.getItems() : null; List<ReplicationController> filteredList = Filters.filter(list, filter); return toReplicationControllerMap(filteredList); } /** * Returns a map indexed by replicationController id of the replicationControllers */ public static Map<String, ReplicationController> toReplicationControllerMap(List<ReplicationController> replicationControllers) { List<ReplicationController> list = notNullList(replicationControllers); Map<String, ReplicationController> answer = new HashMap<>(); for (ReplicationController replicationControllerSchema : list) { String id = getName(replicationControllerSchema); if (Strings.isNotBlank(id)) { answer.put(id, replicationControllerSchema); } } return answer; } public static Map<String, Pod> getPodMap(KubernetesClient kubernetes) { return getPodMap(kubernetes, null); } public static Map<String, Pod> getPodMap(KubernetesClient kubernetes, String namespace) { PodList pods = null; try { pods = kubernetes.pods().inNamespace(namespace).list(); } catch (KubernetesClientException e) { if (e.getCode() == 404) { // ignore not found } else { throw e; } } return toPodMap(pods); } public static Map<String, Pod> getSelectedPodMap(KubernetesClient kubernetes, String selector) { return getSelectedPodMap(kubernetes, null, selector); } public static Map<String, Pod> getSelectedPodMap(KubernetesClient kubernetes, String namespace, String selector) { return getFilteredPodMap(kubernetes, namespace, createPodFilter(selector)); } public static Map<String, Pod> getFilteredPodMap(KubernetesClient kubernetes, Filter<Pod> filter) { return getFilteredPodMap(kubernetes, null, filter); } public static Map<String, Pod> getFilteredPodMap(KubernetesClient kubernetes, String namespace, Filter<Pod> filter) { return toFilteredPodMap(kubernetes.pods().inNamespace(namespace).list(), filter); } public static Map<String, Service> getServiceMap(KubernetesClient kubernetes) { return getServiceMap(kubernetes, null); } public static Map<String, 
Service> getServiceMap(KubernetesClient kubernetes, String namespace) { return toServiceMap(kubernetes.services().inNamespace(namespace).list()); } public static Map<String, ReplicationController> getReplicationControllerMap(KubernetesClient kubernetes) { return getReplicationControllerMap(kubernetes, null); } public static Map<String, ReplicationController> getReplicationControllerMap(KubernetesClient kubernetes, String namespace) { return toReplicationControllerMap(kubernetes.replicationControllers().inNamespace(namespace).list()); } public static Map<String, ReplicationController> getSelectedReplicationControllerMap(KubernetesClient kubernetes, String selector) { return getSelectedReplicationControllerMap(kubernetes, null, selector); } public static Map<String, ReplicationController> getSelectedReplicationControllerMap(KubernetesClient kubernetes, String namespace, String selector) { return toReplicationControllerMap(kubernetes.replicationControllers().inNamespace(namespace).withLabels(toLabelsMap(selector)).list()); } /** * Removes empty pods returned by Kubernetes */ public static void removeEmptyPods(PodList podSchema) { List<Pod> list = notNullList(podSchema.getItems()); List<Pod> removeItems = new ArrayList<Pod>(); for (Pod pod : list) { if (Strings.isNullOrBlank(getName(pod))) { removeItems.add(pod); } } list.removeAll(removeItems); } /** * Returns the pod id for the given container id */ public static String containerNameToPodId(String containerName) { // TODO use prefix? 
return containerName;
    }

    /**
     * Returns the given labels as a single comma-separated string of
     * key=value pairs, e.g. {@code "app=foo,tier=backend"}.
     * Returns an empty string when the map is null or empty.
     */
    public static String toLabelsString(Map<String, String> labelMap) {
        StringBuilder buffer = new StringBuilder();
        if (labelMap != null) {
            Set<Map.Entry<String, String>> entries = labelMap.entrySet();
            for (Map.Entry<String, String> entry : entries) {
                if (buffer.length() > 0) {
                    buffer.append(",");
                }
                buffer.append(entry.getKey());
                buffer.append("=");
                buffer.append(entry.getValue());
            }
        }
        return buffer.toString();
    }

    /**
     * Parses a comma-separated list of key=value pairs into a map.
     * Keys and values are trimmed; entries without an "=" are ignored.
     * Each entry is split on the first "=" only, so values that themselves
     * contain "=" characters are preserved rather than silently dropped.
     */
    public static Map<String, String> toLabelsMap(String labels) {
        Map<String, String> map = new HashMap<>();
        if (labels != null && !labels.isEmpty()) {
            String[] elements = labels.split(",");
            for (String str : elements) {
                // limit of 2 keeps any "=" inside the value intact;
                // an unlimited split used to discard such entries entirely
                String[] keyValue = str.split("=", 2);
                if (keyValue.length == 2) {
                    String key = keyValue[0];
                    String value = keyValue[1];
                    if (key != null && value != null) {
                        map.put(key.trim(), value.trim());
                    }
                }
            }
        }
        return map;
    }

    /**
     * Creates a filter on a pod using the given text string; matches against
     * the pod name or its labels. A null/blank filter matches everything.
     */
    public static Filter<Pod> createPodFilter(final String textFilter) {
        if (isNullOrBlank(textFilter)) {
            return Filters.<Pod>trueFilter();
        } else {
            return new Filter<Pod>() {
                public String toString() {
                    return "PodFilter(" + textFilter + ")";
                }

                public boolean matches(Pod entity) {
                    return filterMatchesIdOrLabels(textFilter, getName(entity), entity.getMetadata().getLabels());
                }
            };
        }
    }

    /**
     * Creates a filter on a pod using the given set of labels.
     * A null or empty selector matches everything.
     */
    public static Filter<Pod> createPodFilter(final Map<String, String> labelSelector) {
        if (labelSelector == null || labelSelector.isEmpty()) {
            return Filters.<Pod>trueFilter();
        } else {
            return new Filter<Pod>() {
                public String toString() {
                    return "PodFilter(" + labelSelector + ")";
                }

                public boolean matches(Pod entity) {
                    return filterLabels(labelSelector, entity.getMetadata().getLabels());
                }
            };
        }
    }

    /**
     * Creates a filter on a pod's annotations using the given set of attribute values
     */
    public static Filter<Pod>
createPodAnnotationFilter(final Map<String, String> annotationSelector) { if (annotationSelector == null || annotationSelector.isEmpty()) { return Filters.<Pod>trueFilter(); } else { return new Filter<Pod>() { public String toString() { return "PodAnnotationFilter(" + annotationSelector + ")"; } public boolean matches(Pod entity) { return filterLabels(annotationSelector, entity.getMetadata().getAnnotations()); } }; } } /** * Creates a filter on a service using the given text string */ public static Filter<Service> createServiceFilter(final String textFilter) { if (isNullOrBlank(textFilter)) { return Filters.<Service>trueFilter(); } else { return new Filter<Service>() { public String toString() { return "ServiceFilter(" + textFilter + ")"; } public boolean matches(Service entity) { return filterMatchesIdOrLabels(textFilter, getName(entity), entity.getMetadata().getLabels()); } }; } } /** * Creates a filter on a service if it matches the given namespace */ public static Filter<Service> createNamespaceServiceFilter(final String namespace) { if (isNullOrBlank(namespace)) { return Filters.<Service>trueFilter(); } else { return new Filter<Service>() { public String toString() { return "NamespaceServiceFilter(" + namespace + ")"; } public boolean matches(Service entity) { return Objects.equal(namespace, getNamespace(entity.getMetadata())); } }; } } /** * Creates a filter on a service using the given text string */ public static Filter<Service> createServiceFilter(final Map<String, String> labelSelector) { if (labelSelector == null || labelSelector.isEmpty()) { return Filters.<Service>trueFilter(); } else { return new Filter<Service>() { public String toString() { return "ServiceFilter(" + labelSelector + ")"; } public boolean matches(Service entity) { return filterLabels(labelSelector, entity.getMetadata().getLabels()); } }; } } /** * Creates a filter on a replicationController using the given text string */ public static Filter<ReplicationController> 
createReplicationControllerFilter(final String textFilter) { if (isNullOrBlank(textFilter)) { return Filters.<ReplicationController>trueFilter(); } else { return new Filter<ReplicationController>() { public String toString() { return "ReplicationControllerFilter(" + textFilter + ")"; } public boolean matches(ReplicationController entity) { return filterMatchesIdOrLabels(textFilter, getName(entity), entity.getMetadata().getLabels()); } }; } } /** * Creates a filter on a replicationController using the given text string */ public static Filter<ReplicationController> createReplicationControllerFilter(final Map<String, String> labelSelector) { if (labelSelector == null || labelSelector.isEmpty()) { return Filters.<ReplicationController>trueFilter(); } else { return new Filter<ReplicationController>() { public String toString() { return "ReplicationControllerFilter(" + labelSelector + ")"; } public boolean matches(ReplicationController entity) { return filterLabels(labelSelector, entity.getMetadata().getLabels()); } }; } } /** * Returns true if the given textFilter matches either the id or the labels */ public static boolean filterMatchesIdOrLabels(String textFilter, String id, Map<String, String> labels) { String text = toLabelsString(labels); boolean result = (text != null && text.contains(textFilter)) || (id != null && id.contains(textFilter)); if (!result) { //labels can be in different order to selector Map<String, String> selectorMap = toLabelsMap(textFilter); if (!selectorMap.isEmpty() && labels != null && !labels.isEmpty()) { result = true; for (Map.Entry<String, String> entry : selectorMap.entrySet()) { String value = labels.get(entry.getKey()); if (value == null || !value.matches(entry.getValue())) { result = false; break; } } } } return result; } /** * Returns true if the given filterLabels matches the actual labels */ public static boolean filterLabels(Map<String, String> filterLabels, Map<String, String> labels) { if (labels == null) { return false; } 
Set<Map.Entry<String, String>> entries = filterLabels.entrySet();
        for (Map.Entry<String, String> entry : entries) {
            String key = entry.getKey();
            String expectedValue = entry.getValue();
            // a key missing from labels yields null, which fails the equality check
            String actualValue = labels.get(key);
            if (!Objects.equal(expectedValue, actualValue)) {
                return false;
            }
        }
        return true;
    }

    /**
     * For positive non-zero values return the text of the number or return blank
     */
    public static String toPositiveNonZeroText(Integer port) {
        if (port != null) {
            int value = port;
            if (value > 0) {
                return "" + value;
            }
        }
        return "";
    }

    /**
     * Returns all the containers from the given pod,
     * or an empty list when the pod is null
     */
    @SuppressWarnings("unchecked")
    public static List<Container> getContainers(Pod pod) {
        if (pod != null) {
            PodSpec podSpec = pod.getSpec();
            // delegate to the PodSpec overload
            return getContainers(podSpec);
        }
        return Collections.EMPTY_LIST;
    }

    /**
     * Returns all the containers from the given Replication Controller,
     * or an empty list when it is null
     */
    @SuppressWarnings("unchecked")
    public static List<Container> getContainers(ReplicationController replicationController) {
        if (replicationController != null) {
            ReplicationControllerSpec replicationControllerSpec = replicationController.getSpec();
            // delegate to the ReplicationControllerSpec overload
            return getContainers(replicationControllerSpec);
        }
        return Collections.EMPTY_LIST;
    }

    /**
     * Returns all the containers from the given Replication Controller's replicationControllerSpec,
     * or an empty list when it is null
     */
    @SuppressWarnings("unchecked")
    public static List<Container> getContainers(ReplicationControllerSpec replicationControllerSpec) {
        if (replicationControllerSpec != null) {
            PodTemplateSpec podTemplateSpec = replicationControllerSpec.getTemplate();
            // delegate to the PodTemplateSpec overload
            return getContainers(podTemplateSpec);
        }
        return Collections.EMPTY_LIST;
    }

    /**
     * Returns the containers from the given pod spec,
     * or an empty list when it is null
     */
    @SuppressWarnings("unchecked")
    public static List<Container> getContainers(PodSpec podSpec) {
        if (podSpec != null) {
            return podSpec.getContainers();
        }
        return Collections.EMPTY_LIST;
    }

    /**
     * Returns the containers from the given pod template spec,
     * or an empty list when it is null
     */
    @SuppressWarnings("unchecked")
    public static List<Container> getContainers(PodTemplateSpec podTemplateSpec) {
        if (podTemplateSpec != null) {
            return getContainers(podTemplateSpec.getSpec());
} return Collections.EMPTY_LIST; } /** * Returns all the containers from the given Replication Controller */ @SuppressWarnings("unchecked") public static List<Container> getCurrentContainers(ReplicationController replicationController) { if (replicationController != null) { // TODO } return Collections.EMPTY_LIST; } /** * Returns all the current containers from the given currentState */ @SuppressWarnings("unchecked") public static Map<String, ContainerStatus> getCurrentContainers(Pod pod) { if (pod != null) { PodStatus currentStatus = pod.getStatus(); return getCurrentContainers(currentStatus); } return Collections.EMPTY_MAP; } /** * Returns all the current containers from the given podStatus */ @SuppressWarnings("unchecked") public static Map<String, ContainerStatus> getCurrentContainers(PodStatus podStatus) { if (podStatus != null) { List<ContainerStatus> containerStatuses = podStatus.getContainerStatuses(); Map<String, ContainerStatus> info = new Hashtable<>(containerStatuses.size()); for (ContainerStatus status : containerStatuses) { info.put(status.getContainerID(), status); } return info; } return Collections.EMPTY_MAP; } /** * Returns the host of the pod */ public static String getHost(Pod pod) { if (pod != null) { PodStatus currentState = pod.getStatus(); if (currentState != null) { return currentState.getHostIP(); } } return null; } /** * Returns the container port number for the given service */ @SuppressWarnings("unchecked") public static Set<Integer> getContainerPorts(Service service) { Set<Integer> answer = Collections.EMPTY_SET; String id = getName(service); ServiceSpec spec = service.getSpec(); if (spec != null) { List<ServicePort> servicePorts = spec.getPorts(); Objects.notNull(servicePorts, "servicePorts for service " + id); answer = new HashSet<>(servicePorts.size()); String message = "service " + id; for (ServicePort port : servicePorts) { IntOrString intOrStringValue = port.getTargetPort(); Integer intValue = 
intOrStringToInteger(intOrStringValue, message); if (intValue != null) { answer.add(intValue); } } } return answer; } /** * Returns the IntOrString converted to an Integer value or throws an exception with the given message */ public static Integer intOrStringToInteger(IntOrString intOrStringValue, String message) { Integer intValue = intOrStringValue.getIntVal(); if (intValue == null) { String containerPortText = intOrStringValue.getStrVal(); if (Strings.isNullOrBlank(containerPortText)) { throw new IllegalArgumentException("No port for " + message); } try { intValue = Integer.parseInt(containerPortText); } catch (NumberFormatException e) { throw new IllegalStateException("Invalid servicePorts expression " + containerPortText + " for " + message + ". " + e, e); } } return intValue; } /** * Returns the container port number for the given service */ @SuppressWarnings("unchecked") public static Set<String> getContainerPortsStrings(Service service) { Set<String> answer = Collections.EMPTY_SET; String id = getName(service); ServiceSpec spec = service.getSpec(); if (spec != null) { List<ServicePort> servicePorts = spec.getPorts(); Objects.notNull(servicePorts, "servicePorts for service " + id); answer = new HashSet<>(servicePorts.size()); for (ServicePort port : servicePorts) { IntOrString intOrStringValue = port.getTargetPort(); Integer intValue = intOrStringValue.getIntVal(); if (intValue != null) { answer.add(intValue.toString()); } else { String containerPortText = intOrStringValue.getStrVal(); if (Strings.isNullOrBlank(containerPortText)) { throw new IllegalArgumentException("No servicePorts for service " + id); } answer.add(containerPortText); } } } return answer; } /** * Combines the JSON objects into a config object */ public static Object combineJson(Object... 
objects) throws IOException { KubernetesList list = findOrCreateList(objects); List<HasMetadata> items = list.getItems(); if (items == null) { items = new ArrayList<>(); list.setItems(items); } for (Object object : objects) { if (object != list) { addObjectsToItemArray(items, object); } } moveServicesToFrontOfArray(items); removeDuplicates(items); Object answer = Templates.combineTemplates(list, items); items = toItemList(answer); removeDuplicates(items); return answer; } /** * Lets move all Service resources before any other to avoid ordering issues creating things */ public static void moveServicesToFrontOfArray(List<HasMetadata> list) { int size = list.size(); int lastNonService = -1; for (int i = 0; i < size; i++) { HasMetadata item = list.get(i); if (item instanceof Service) { if (lastNonService >= 0) { HasMetadata nonService = list.get(lastNonService); list.set(i, nonService); list.set(lastNonService, item); lastNonService++; } } else if (lastNonService < 0) { lastNonService = i; } } } /** * Remove any duplicate resources using the kind and id */ protected static void removeDuplicates(List<HasMetadata> itemArray) { int size = itemArray.size(); int lastNonService = -1; Set<String> keys = new HashSet<>(); for (int i = 0; i < size; i++) { HasMetadata item = itemArray.get(i); if (item == null) { itemArray.remove(i); i--; size--; } else { String id = getObjectId(item); String kind = item.getClass().getSimpleName(); if (Strings.isNotBlank(id)) { String key = kind + ":" + id; if (!keys.add(key)) { // lets remove this one itemArray.remove(i); i--; size--; } } } } } @SuppressWarnings("unchecked") protected static void addObjectsToItemArray(List destinationList, Object object) throws IOException { if (object instanceof KubernetesList) { KubernetesList kubernetesList = (KubernetesList) object; List<HasMetadata> items = kubernetesList.getItems(); if (items != null) { destinationList.addAll(items); } } else if (object instanceof Collection) { Collection collection = 
(Collection) object; destinationList.addAll(collection); } else { destinationList.add(object); } } protected static KubernetesList findOrCreateList(Object[] objects) { KubernetesList list = null; for (Object object : objects) { if (object instanceof KubernetesList) { list = (KubernetesList) object; break; } } if (list == null) { list = new KubernetesList(); } return list; } /** * Returns the URL to access the service; using the service portalIP and port */ public static String getServiceURL(Service service) { if (service != null) { ServiceSpec spec = service.getSpec(); if (spec != null) { String portalIP = spec.getClusterIP(); if (portalIP != null) { Integer port = spec.getPorts().iterator().next().getPort(); if (port != null && port > 0) { portalIP += ":" + port; } String protocol = "http://"; if (KubernetesHelper.isServiceSsl(spec.getClusterIP(), port, Utils.getSystemPropertyOrEnvVar(io.fabric8.kubernetes.client.Config.KUBERNETES_TRUST_CERT_SYSTEM_PROPERTY, false))) { protocol = "https://"; } return protocol + portalIP; } } } return null; } /** * Returns the URL to access the service; using the environment variables, routes * or service portalIP address * * @throws IllegalArgumentException if the URL cannot be found for the serviceName and namespace */ public static String getServiceURL(KubernetesClient client, String serviceName, String serviceNamespace, String serviceProtocol, boolean serviceExternal) { return getServiceURL(client, serviceName, serviceNamespace, serviceProtocol, null, serviceExternal); } /** * Returns the URL to access the service; using the environment variables, routes * or service portalIP address * * @throws IllegalArgumentException if the URL cannot be found for the serviceName and namespace */ public static String getServiceURL(KubernetesClient client, String serviceName, String serviceNamespace, String serviceProtocol, String servicePortName, boolean serviceExternal) { Service srv = null; String serviceHost = serviceToHost(serviceName); 
String servicePort = serviceToPort(serviceName, servicePortName); String serviceProto = serviceProtocol != null ? serviceProtocol : serviceToProtocol(serviceName, servicePort); //1. Inside Kubernetes: Services as ENV vars if (!serviceExternal && Strings.isNotBlank(serviceHost) && Strings.isNotBlank(servicePort) && Strings.isNotBlank(serviceProtocol)) { return serviceProtocol + "://" + serviceHost + ":" + servicePort; //2. Anywhere: When namespace is passed System / Env var. Mostly needed for integration tests. } else if (Strings.isNotBlank(serviceNamespace)) { srv = client.services().inNamespace(serviceNamespace).withName(serviceName).get(); } else { for (Service s : client.services().list().getItems()) { String sid = getName(s); if (serviceName.equals(sid)) { srv = s; break; } } } if (srv == null) { throw new IllegalArgumentException("No kubernetes service could be found for name: " + serviceName + " in namespace: " + serviceNamespace); } if (Strings.isNullOrBlank(servicePortName) && isOpenShift(client)) { OpenShiftClient openShiftClient = client.adapt(OpenShiftClient.class); RouteList routeList = openShiftClient.routes().inNamespace(serviceNamespace).list(); for (Route route : routeList.getItems()) { if (route.getSpec().getTo().getName().equals(serviceName)) { return (serviceProto + "://" + route.getSpec().getHost()).toLowerCase(); } } } ServicePort port = findServicePortByName(srv, servicePortName); if (port == null) { throw new RuntimeException("Couldn't find port: " + servicePortName + " for service:" + serviceName); } return (serviceProto + "://" + srv.getSpec().getPortalIP() + ":" + port.getPort()).toLowerCase(); } /** * Returns the URL to access the service; using the environment variables, routes * or service portalIP address * * @throws IllegalArgumentException if the URL cannot be found for the serviceName and namespace */ public static String getServiceURLInCurrentNamespace(KubernetesClient client, String serviceName, String serviceProtocol, String 
servicePortName, boolean serviceExternal) { Service srv = null; String serviceHost = serviceToHost(serviceName); String servicePort = serviceToPort(serviceName, servicePortName); String serviceProto = serviceProtocol != null ? serviceProtocol : serviceToProtocol(serviceName, servicePort); //1. Inside Kubernetes: Services as ENV vars if (!serviceExternal && Strings.isNotBlank(serviceHost) && Strings.isNotBlank(servicePort) && Strings.isNotBlank(serviceProtocol)) { return serviceProtocol + "://" + serviceHost + ":" + servicePort; //2. Anywhere: When namespace is passed System / Env var. Mostly needed for integration tests. } else { srv = client.services().withName(serviceName).get(); } if (srv == null) { throw new IllegalArgumentException("No kubernetes service could be found for name: " + serviceName); } if (Strings.isNullOrBlank(servicePortName) && isOpenShift(client)) { OpenShiftClient openShiftClient = client.adapt(OpenShiftClient.class); RouteList routeList = openShiftClient.routes().list(); for (Route route : routeList.getItems()) { if (route.getSpec().getTo().getName().equals(serviceName)) { return (serviceProto + "://" + route.getSpec().getHost()).toLowerCase(); } } } ServicePort port = findServicePortByName(srv, servicePortName); if (port == null) { throw new RuntimeException("Couldn't find port: " + servicePortName + " for service:" + serviceName); } return (serviceProto + "://" + srv.getSpec().getPortalIP() + ":" + port.getPort()).toLowerCase(); } public static String serviceToHost(String id) { return Systems.getEnvVarOrSystemProperty(toEnvVariable(id + HOST_SUFFIX), ""); } public static String serviceToPort(String serviceId) { return serviceToPort(serviceId, null); } public static String serviceToPort(String serviceId, String portName) { String name = serviceId + PORT_SUFFIX + (Strings.isNotBlank(portName) ? 
"_" + portName : ""); return Systems.getEnvVarOrSystemProperty(toEnvVariable(name), ""); } public static String serviceToProtocol(String id, String servicePort) { return Systems.getEnvVarOrSystemProperty(toEnvVariable(id + PORT_SUFFIX + "_" + servicePort + PROTO_SUFFIX), DEFAULT_PROTO); } public static String toEnvVariable(String str) { return str.toUpperCase().replaceAll("-", "_"); } /** * Returns the port for the given port number on the pod */ public static ContainerPort findContainerPort(Pod pod, Integer portNumber) { List<Container> containers = KubernetesHelper.getContainers(pod); for (Container container : containers) { List<ContainerPort> ports = container.getPorts(); for (ContainerPort port : ports) { if (Objects.equal(portNumber, port.getContainerPort())) { return port; } } } return null; } public static ServicePort findServicePortByName(Service service, String portName) { if (Strings.isNullOrBlank(portName)) { return service.getSpec().getPorts().iterator().next(); } for (ServicePort servicePort : service.getSpec().getPorts()) { if (servicePort.getName().equals(portName)) { return servicePort; } } return null; } /** * Returns the port for the given port name */ public static ContainerPort findContainerPortByName(Pod pod, String name) { List<Container> containers = KubernetesHelper.getContainers(pod); for (Container container : containers) { List<ContainerPort> ports = container.getPorts(); for (ContainerPort port : ports) { if (Objects.equal(name, port.getName())) { return port; } } } return null; } /** * Returns the port for the given port number or name */ public static ContainerPort findContainerPortByNumberOrName(Pod pod, String numberOrName) { Integer portNumber = toOptionalNumber(numberOrName); if (portNumber != null) { return findContainerPort(pod, portNumber); } else { return findContainerPortByName(pod, numberOrName); } } /** * Returns the number if it can be parsed or null */ protected static Integer toOptionalNumber(String text) { if 
(Strings.isNotBlank(text)) { try { return Integer.parseInt(text); } catch (NumberFormatException e) { // ignore parse errors } } return null; } public static PodStatusType getPodStatus(Pod pod) { String text = getPodStatusText(pod); if (Strings.isNotBlank(text)) { text = text.toLowerCase(); if (text.startsWith("run")) { return PodStatusType.OK; } else if (text.startsWith("wait")) { return PodStatusType.WAIT; } else { return PodStatusType.ERROR; } } return PodStatusType.WAIT; } /** * Returns true if the pod is running */ public static boolean isPodRunning(Pod pod) { PodStatusType status = getPodStatus(pod); return Objects.equal(status, PodStatusType.OK); } public static String getPodStatusText(Pod pod) { if (pod != null) { PodStatus podStatus = pod.getStatus(); if (podStatus != null) { return podStatus.getPhase(); } } return null; } /** * Returns the environment variable value for the first container which has a value for it in th epod */ public static String getPodEnvVar(Pod pod, String envVarName) { if (pod != null) { PodSpec spec = pod.getSpec(); if (spec != null) { List<Container> containers = spec.getContainers(); if (containers != null) { for (Container container : containers) { String answer = getContainerEnvVar(container, envVarName); if (Strings.isNotBlank(answer)) { return answer; } } } } } return null; } /** * Returns the environment variable value for the given container and name */ public static String getContainerEnvVar(Container container, String envVarName) { if (container != null) { List<EnvVar> env = container.getEnv(); if (env != null) { for (EnvVar envVar : env) { if (Objects.equal(envVarName, envVar.getName())) { return envVar.getValue(); } } } } return null; } /** * Returns the pods for the given replication controller */ @SuppressWarnings("unchecked") public static List<Pod> getPodsForReplicationController(ReplicationController replicationController, Iterable<Pod> pods) { ReplicationControllerSpec replicationControllerSpec = 
replicationController.getSpec(); if (replicationControllerSpec == null) { LOG.warn("Cannot instantiate replication controller: " + getName(replicationController) + " due to missing ReplicationController.Spec!"); } else { Map<String, String> replicaSelector = replicationControllerSpec.getSelector(); Filter<Pod> podFilter = KubernetesHelper.createPodFilter(replicaSelector); return Filters.filter(pods, podFilter); } return Collections.EMPTY_LIST; } /** * Returns the pods for the given service */ public static List<Pod> getPodsForService(Service service, Iterable<Pod> pods) { Map<String, String> selector = getSelector(service); Filter<Pod> podFilter = KubernetesHelper.createPodFilter(selector); return Filters.filter(pods, podFilter); } /** * Looks up the service endpoints in DNS. * <p/> * Endpoints are registered as SRV records in DNS so this method returns * endpoints in the format "host:port". This is a list as SRV records are ordered * by priority & weight before being returned to the client. 
* <p/> * See https://github.com/GoogleCloudPlatform/kubernetes/blob/master/cluster/addons/dns/README.md */ public static List<String> lookupServiceEndpointsInDns(String serviceName) throws IllegalArgumentException, UnknownHostException { try { Lookup l = new Lookup(serviceName, Type.SRV); Record[] records = l.run(); if (l.getResult() == Lookup.SUCCESSFUL) { SRVRecord[] srvRecords = Arrays.copyOf(records, records.length, SRVRecord[].class); Arrays.sort(srvRecords, new Comparator<SRVRecord>() { @Override public int compare(SRVRecord a, SRVRecord b) { int ret = Integer.compare(b.getPriority(), a.getPriority()); if (ret == 0) { ret = Integer.compare(b.getWeight(), a.getWeight()); } return ret; } }); List<String> endpointAddresses = new ArrayList<>(srvRecords.length); for (SRVRecord srvRecord : srvRecords) { endpointAddresses.add(srvRecord.getTarget().toString(true).concat(":").concat(String.valueOf(srvRecord.getPort()))); } return endpointAddresses; } else { LOG.warn("Lookup {} result: {}", serviceName, l.getErrorString()); } } catch (TextParseException e) { LOG.error("Unparseable service name: {}", serviceName, e); } catch (ClassCastException e) { LOG.error("Invalid response from DNS server - should have been A records", e); } return Collections.EMPTY_LIST; } /** * Looks up the service in DNS. * If this is a headless service, this call returns the endpoint IPs from DNS. * If this is a non-headless service, this call returns the service IP only. 
* <p/> * See https://github.com/GoogleCloudPlatform/kubernetes/blob/master/docs/services.md#headless-services */ public static Set<String> lookupServiceInDns(String serviceName) throws IllegalArgumentException, UnknownHostException { try { Lookup l = new Lookup(serviceName); Record[] records = l.run(); if (l.getResult() == Lookup.SUCCESSFUL) { Set<String> endpointAddresses = new HashSet<>(records.length); for (int i = 0; i < records.length; i++) { ARecord aRecord = (ARecord) records[i]; endpointAddresses.add(aRecord.getAddress().getHostAddress()); } return endpointAddresses; } else { LOG.warn("Lookup {} result: {}", serviceName, l.getErrorString()); } } catch (TextParseException e) { LOG.error("Unparseable service name: {}", serviceName, e); } catch (ClassCastException e) { LOG.error("Invalid response from DNS server - should have been A records", e); } return Collections.EMPTY_SET; } public static boolean isServiceSsl(String host, int port, boolean trustAllCerts) { try { LOG.info("Checking if a service is SSL on " + host + ":" + port); SSLSocketFactory sslsocketfactory; if (trustAllCerts) { sslsocketfactory = TrustEverythingSSLTrustManager.getTrustingSSLSocketFactory(); } else { sslsocketfactory = (SSLSocketFactory) SSLSocketFactory.getDefault(); } Socket socket = sslsocketfactory.createSocket(); // Connect, with an explicit timeout value socket.connect(new InetSocketAddress(host, port), 1 * 1000); try { InputStream in = socket.getInputStream(); OutputStream out = socket.getOutputStream(); // Write a test byte to get a reaction :) out.write(1); while (in.available() > 0) { System.out.print(in.read()); } return true; } finally { LOG.info("Checked if a service is SSL on " + host + ":" + port); socket.close(); } } catch (SSLHandshakeException e) { LOG.error("SSL handshake failed - this probably means that you need to trust the kubernetes root SSL certificate or set the environment variable " + 
Utils.convertSystemPropertyNameToEnvVar(io.fabric8.kubernetes.client.Config.KUBERNETES_TRUST_CERT_SYSTEM_PROPERTY), e); } catch (SSLProtocolException e) { LOG.error("SSL protocol error", e); } catch (SSLKeyException e) { LOG.error("Bad SSL key", e); } catch (SSLPeerUnverifiedException e) { LOG.error("Could not verify server", e); } catch (SSLException e) { LOG.debug("Address does not appear to be SSL-enabled - falling back to http", e); } catch (IOException e) { LOG.debug("Failed to validate service", e); } return false; } /** * Validates that the given value is valid according to the kubernetes ID parsing rules, throwing an exception if not. */ public static String validateKubernetesId(String currentValue, String description) throws IllegalArgumentException { if (isNullOrBlank(currentValue)) { throw new IllegalArgumentException("No " + description + " is specified!"); } int size = currentValue.length(); for (int i = 0; i < size; i++) { char ch = currentValue.charAt(i); if (Character.isUpperCase(ch)) { throw new IllegalArgumentException("Invalid upper case letter '" + ch + "' at index " + i + " for " + description + " value: " + currentValue); } } return currentValue; } public static Date parseDate(String text) { try { return dateTimeFormat.parse(text); } catch (ParseException e) { LOG.warn("Failed to parse date: " + text + ". 
Reason: " + e); return null; } } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(Object object) { if (object instanceof Route) { return summaryText((Route) object); } else if (object instanceof Service) { return summaryText((Service) object); } else if (object instanceof ReplicationController) { return summaryText((ReplicationController) object); } else if (object instanceof Pod) { return summaryText((Pod) object); } else if (object instanceof Template) { return summaryText((Template) object); } else if (object instanceof DeploymentConfig) { return summaryText((DeploymentConfig) object); } else if (object instanceof OAuthClient) { return summaryText((OAuthClient) object); } else if (object instanceof String) { return object.toString(); } return ""; } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(Route entity) { RouteSpec spec = entity.getSpec(); if (spec == null) { return "No spec!"; } return "host: " + spec.getHost(); } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(ContainerState entity) { ContainerStateRunning running = entity.getRunning(); if (running != null) { return "Running"; } ContainerStateWaiting waiting = entity.getWaiting(); if (waiting != null) { return "Waiting"; } ContainerStateTerminated termination = entity.getTerminated(); if (termination != null) { return "Terminated"; } return "Unknown"; } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(Template entity) { StringBuilder buffer = new StringBuilder(); List<Parameter> parameters = entity.getParameters(); if (parameters != null) { for (Parameter parameter : parameters) { String name = parameter.getName(); appendText(buffer, name); } } return "parameters: " + buffer; } /** * Returns a short summary text message for the given kubernetes 
resource */ public static String summaryText(OAuthClient entity) { return "redirectURIs: " + entity.getRedirectURIs(); } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(Service entity) { StringBuilder portText = new StringBuilder(); ServiceSpec spec = entity.getSpec(); if (spec == null) { return "No spec"; } else { List<ServicePort> ports = spec.getPorts(); if (ports != null) { for (ServicePort port : ports) { Integer number = port.getPort(); if (number != null) { if (portText.length() > 0) { portText.append(", "); } portText.append("").append(number); } } } return "selector: " + spec.getSelector() + " ports: " + portText; } } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(ReplicationController entity) { StringBuilder buffer = new StringBuilder(); ReplicationControllerSpec spec = entity.getSpec(); if (spec != null) { buffer.append("replicas: ").append(spec.getReplicas()); PodTemplateSpec podTemplateSpec = spec.getTemplate(); if (podTemplateSpec != null) { appendSummaryText(buffer, podTemplateSpec); } } return buffer.toString(); } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(DeploymentConfig entity) { StringBuilder buffer = new StringBuilder(); DeploymentConfigSpec spec = entity.getSpec(); if (spec != null) { buffer.append("replicas: " + spec.getReplicas()); PodTemplateSpec podTemplateSpec = spec.getTemplate(); if (podTemplateSpec != null) { appendSummaryText(buffer, podTemplateSpec); } } return buffer.toString(); } /** * Returns a short summary text message for the given kubernetes resource */ public static String summaryText(Pod entity) { StringBuilder buffer = new StringBuilder(); PodSpec podSpec = entity.getSpec(); appendSummaryText(buffer, podSpec); return buffer.toString(); } protected static void appendSummaryText(StringBuilder buffer, PodTemplateSpec 
podTemplateSpec) { if (podTemplateSpec != null) { appendSummaryText(buffer, podTemplateSpec.getSpec()); } } protected static void appendSummaryText(StringBuilder buffer, PodSpec podSpec) { if (podSpec != null) { List<Container> containers = podSpec.getContainers(); if (containers != null) { for (Container container : containers) { String image = container.getImage(); appendText(buffer, "image: " + image); } } } } protected static void appendText(StringBuilder buffer, String text) { if (buffer.length() > 0) { buffer.append(", "); } buffer.append(text); } /** * Creates an IntOrString from the given string which could be a number or a name */ public static IntOrString createIntOrString(int intVal) { IntOrString answer = new IntOrString(); answer.setIntVal(intVal); answer.setKind(INTORSTRING_KIND_INT); return answer; } /** * Creates an IntOrString from the given string which could be a number or a name */ public static IntOrString createIntOrString(String nameOrNumber) { if (isNullOrBlank(nameOrNumber)) { return null; } else { IntOrString answer = new IntOrString(); Integer intVal = null; try { intVal = Integer.parseInt(nameOrNumber); } catch (Exception e) { // ignore invalid number } if (intVal != null) { answer.setIntVal(intVal); answer.setKind(INTORSTRING_KIND_INT); } else { answer.setStrVal(nameOrNumber); answer.setKind(INTORSTRING_KIND_STRING); } return answer; } } public static String getStatusText(PodStatus podStatus) { String status; List<String> statusList = new ArrayList<>(); List<ContainerStatus> containerStatuses = podStatus.getContainerStatuses(); for (ContainerStatus containerStatus : containerStatuses) { ContainerState state = containerStatus.getState(); String statusText = summaryText(state); if (statusText != null) { statusList.add(statusText); } } if (statusList.size() == 1) { status = statusList.get(0); } else { status = statusList.toString(); } return status; } public static Secret validateSecretExists(KubernetesClient kubernetes, String namespace, 
String secretName) { Secret secret = null; try { secret = kubernetes.secrets().inNamespace(namespace).withName(secretName).get(); } catch (KubernetesClientException e) { if (e.getCode() == 404 || e.getCode() == 403) { // does not exist or namespace does not exists } else { throw e; } } if (secret == null) { throw new IllegalArgumentException("No secret named: " + secretName + " for namespace " + namespace + " is available on Kubernetes" + ". For how to create secrets see: http://fabric8.io/guide/fabric8OnOpenShift.html#requirements "); } else { return secret; } } /** * Converts the DTO loaded from JSON to a {@link KubernetesList} assuming its not a {@link Template} */ public static KubernetesList asKubernetesList(Object dto) throws IOException { if (dto instanceof KubernetesList) { return (KubernetesList) dto; } else { KubernetesList answer = new KubernetesList(); List<HasMetadata> items = toItemList(dto); answer.setItems(items); return answer; } } /** * Returns true if this object is a pure kubernetes DTO */ public static boolean isPureKubernetes(HasMetadata item) { if (item != null) { String name = item.getClass().getName(); return name.startsWith("io.fabric8.kubernetes"); } return false; } public static boolean isOpenShift(KubernetesClient client) { URL masterUrl = client.getMasterUrl(); if (IS_OPENSHIFT.containsKey(masterUrl)) { return IS_OPENSHIFT.get(masterUrl); } else { RootPaths rootPaths = client.rootPaths(); if (rootPaths != null) { List<String> paths = rootPaths.getPaths(); if (paths != null) { for (String path : paths) { if (java.util.Objects.equals("/oapi", path) || java.util.Objects.equals("oapi", path)) { IS_OPENSHIFT.putIfAbsent(masterUrl, true); return true; } } } } } IS_OPENSHIFT.putIfAbsent(masterUrl, false); return false; } }
Resolves #4839 maven fabric8:json - Output json in pretty format
components/kubernetes-api/src/main/java/io/fabric8/kubernetes/api/KubernetesHelper.java
Resolves #4839 maven fabric8:json - Output json in pretty format
<ide><path>omponents/kubernetes-api/src/main/java/io/fabric8/kubernetes/api/KubernetesHelper.java <ide> * Saves the json object to the given file <ide> */ <ide> public static void saveJson(File json, Object object) throws IOException { <del> OBJECT_MAPPER.writer().writeValue(json, object); <add> OBJECT_MAPPER.writer().withDefaultPrettyPrinter().writeValue(json, object); <ide> } <ide> <ide> /**
Java
unlicense
47a82994f4a64d3e14c5367705d943791a619249
0
eduardog3000/EnderIO,torteropaid/EnderIO,Quantum64/EnderIO,mezz/EnderIO,eduardog3000/EnderIO,Vexatos/EnderIO,Samernieve/EnderIO,SleepyTrousers/EnderIO,MatthiasMann/EnderIO,HenryLoenwind/EnderIO,Vexatos/EnderIO,mmelvin0/EnderIO,Joccob/EnderIO,Joccob/EnderIO,D-Inc/EnderIO,MrNuggelz/EnderIO
package cofh.api.energy; import net.minecraftforge.common.util.ForgeDirection; /** * Implement this interface on TileEntities which should handle energy, generally storing it in one or more internal {@link IEnergyStorage} objects. * * A reference implementation is provided {@link TileEnergyHandler}. * * @author King Lemming * */ public interface IEnergyHandler extends IEnergyConnection { /** * Add energy to an IEnergyHandler, internal distribution is left entirely to the IEnergyHandler. * * @param from * Orientation the energy is received from. * @param maxReceive * Maximum amount of energy to receive. * @param simulate * If TRUE, the charge will only be simulated. * @return Amount of energy that was (or would have been, if simulated) received. */ int receiveEnergy(ForgeDirection from, int maxReceive, boolean simulate); /** * Remove energy from an IEnergyHandler, internal distribution is left entirely to the IEnergyHandler. * * @param from * Orientation the energy is extracted from. * @param maxExtract * Maximum amount of energy to extract. * @param simulate * If TRUE, the extraction will only be simulated. * @return Amount of energy that was (or would have been, if simulated) extracted. */ int extractEnergy(ForgeDirection from, int maxExtract, boolean simulate); /** * Returns the amount of energy currently stored. */ int getEnergyStored(ForgeDirection from); /** * Returns the maximum amount of energy that can be stored. */ int getMaxEnergyStored(ForgeDirection from); }
src/main/java/cofh/api/energy/IEnergyHandler.java
package cofh.api.energy; import net.minecraftforge.common.util.ForgeDirection; /** * Implement this interface on TileEntities which should handle energy, generally storing it in one or more internal {@link IEnergyStorage} objects. * * A reference implementation is provided {@link TileEnergyHandler}. * * @author King Lemming * */ public interface IEnergyHandler { /** * Add energy to an IEnergyHandler, internal distribution is left entirely to the IEnergyHandler. * * @param from * Orientation the energy is received from. * @param maxReceive * Maximum amount of energy to receive. * @param simulate * If TRUE, the charge will only be simulated. * @return Amount of energy that was (or would have been, if simulated) received. */ int receiveEnergy(ForgeDirection from, int maxReceive, boolean simulate); /** * Remove energy from an IEnergyHandler, internal distribution is left entirely to the IEnergyHandler. * * @param from * Orientation the energy is extracted from. * @param maxExtract * Maximum amount of energy to extract. * @param simulate * If TRUE, the extraction will only be simulated. * @return Amount of energy that was (or would have been, if simulated) extracted. */ int extractEnergy(ForgeDirection from, int maxExtract, boolean simulate); /** * Returns true if the Handler functions on a given side - if a Tile Entity can receive or send energy on a given side, this should return true. */ boolean canInterface(ForgeDirection from); /** * Returns the amount of energy currently stored. */ int getEnergyStored(ForgeDirection from); /** * Returns the maximum amount of energy that can be stored. */ int getMaxEnergyStored(ForgeDirection from); }
Update RF API
src/main/java/cofh/api/energy/IEnergyHandler.java
Update RF API
<ide><path>rc/main/java/cofh/api/energy/IEnergyHandler.java <ide> * @author King Lemming <ide> * <ide> */ <del>public interface IEnergyHandler { <add>public interface IEnergyHandler extends IEnergyConnection { <ide> <ide> /** <ide> * Add energy to an IEnergyHandler, internal distribution is left entirely to the IEnergyHandler. <ide> int extractEnergy(ForgeDirection from, int maxExtract, boolean simulate); <ide> <ide> /** <del> * Returns true if the Handler functions on a given side - if a Tile Entity can receive or send energy on a given side, this should return true. <del> */ <del> boolean canInterface(ForgeDirection from); <del> <del> /** <ide> * Returns the amount of energy currently stored. <ide> */ <ide> int getEnergyStored(ForgeDirection from);
JavaScript
mit
1bca84dad222ed073e1461661b95d6bc4b25ed9c
0
shanetechwiz/node-steamcardbot
var Steam = require('steam'); var SteamUser = require('steam-user'); var TradeOfferManager = require('steam-tradeoffer-manager'); var SteamTotp = require('steam-totp'); var Steamcommunity = require('steamcommunity'); var SteamWebLogOn = require('steam-weblogon'); var util = require('util'); var UInt64 = require('cuint').UINT64; var client = new SteamUser(); var steamClient = new Steam.SteamClient(); var steamUser = new Steam.SteamUser(steamClient); var steamFriends = new Steam.SteamFriends(steamClient); var steamWebLogOn = new SteamWebLogOn(steamClient, steamUser); var community = new Steamcommunity(); var manager = new TradeOfferManager({ "steam": client, "domain": "example.com", "language": "en" }); var config = require('./config'); var code = SteamTotp.generateAuthCode(config.bot.shared_secret); var logOnOptions = { account_name: config.bot.username, password: config.bot.password, two_factor_code: code } function log(message) { console.log(new Date().toString() + ' - ' + message); steamFriends.sendMessage(config.admin.Oshane, message.toString()); } function steamIdObjectToSteamId64(steamIdObject) { return new UInt64(steamIdObject.accountid, (steamIdObject.universe << 24) | (steamIdObject.type << 20) | (steamIdObject.instance)).toString(); } function Login(logOnOptions) { steamClient.connect(); steamClient.on('connected', function() { log('Connected...'); steamUser.logOn(logOnOptions); }); steamClient.on('logOnResponse', function(logonResp) { if (logonResp.eresult === Steam.EResult.OK) { log('Login Successful!'); steamFriends.setPersonaState(Steam.EPersonaState.Online); steamWebLogOn.webLogOn(function(sessionID, cookies) { manager.setCookies(cookies, function(err) { if(err) { log(err); process.exit(1); return; } }); community.setCookies(cookies); community.startConfirmationChecker(30000, config.bot.identity_secret); if(community.chatState == 0) { community.chatLogon(); } community.on('chatMessage', function(sender, text) { handleChatMessages(sender, text); }); 
}); } else { log(logonResp.eresult); } }); } function handleChatMessages(steamID, message) { steamID = steamIdObjectToSteamId64(steamID); message = message.trim(); var friendList = steamFriends.friends; if(friendList[steamID] && friendList[steamID] == Steam.EFriendRelationship.Friend) { if(message.indexOf('!help') > -1) { steamFriends.sendMessage(steamID, config.message.help.toString()); } else if(message.indexOf('!buy') > -1) { numberOfKeys = message.replace ( /[^\d.]/g, '' ); if(isNaN(numberOfKeys) == true) { steamFriends.sendMessage(steamID, config.message.invalid_number_of_keys.toString()); } else { if(numberOfKeys > config.max_number_of_keys) { steamFriends.sendMessage(steamID, config.message.excess_keys.toString()); } else { sellSets(steamID, Math.round(numberOfKeys)); steamFriends.sendMessage(steamID, config.message.buy.toString()); } } } else { steamFriends.sendMessage(steamID, config.message.invalid_command.toString()); } } else { community.chatMessage(steamID, config.message.not_in_friendlist.toString()); } } var getSpecificItemFromInventoryByTagName = function(inventory, tagName) { var inventoryItems = []; inventory.forEach(function(inventoryItem) { if(inventoryItem.tags) { inventoryItem.tags.forEach(function(tag) { if(tag.name && tag.name == tagName) { inventoryItems.push(inventoryItem); } }); } }); return inventoryItems; } var getSpecificNumberOfItemsFromInventory = function(itemInventory, numberOfItems) { var items = []; for(var i = 0; i < numberOfItems; i++) { if(i < itemInventory.length) { var item = itemInventory[i]; items.push({ assetid: item.assetid, appid: item.appid, contextid: item.contextid, amount: 1}); } } return items; } var getSmallerNumber = function(first, second) { return Math.min(first, second); } function sellSets(steamID, numberOfKeys) { var theirItems = []; var myItems = []; manager.getUserInventoryContents(steamID, config.app_id.csgo, config.context_id.keys, true, function(err, userInventory, userCurrencies) { userInventory = 
getSpecificItemFromInventoryByTagName(userInventory, 'Key'); theirItems = getSpecificNumberOfItemsFromInventory(userInventory, numberOfKeys); if(theirItems.length > 0) { manager.getInventoryContents(config.app_id.steam, config.context_id.cards, true, function(err, inventory, currencies) { numberOfKeys = getSmallerNumber(numberOfKeys, theirItems.length); inventory = getSpecificItemFromInventoryByTagName(inventory, 'Trading Card'); var numberOfCardSets = numberOfKeys * config.sets_per_key; myItems = getSpecificNumberOfItemsFromInventory(inventory, numberOfCardSets); if(myItems.length > 0) { var offer = manager.createOffer(steamID); offer.addMyItems(myItems); offer.addTheirItems(theirItems); offer.setMessage(config.message.tradeoffer.toString()); offer.send(function(err, status) { if(err) { log('Sale of cards failed: ' + err); return; } if(status == 'pending') { community.checkConfirmations(); log('checkConfirmations executed'); } steamFriends.sendMessage(steamID, config.message.cards_sold.toString()); }); } }); } }); } Login(logOnOptions); steamFriends.on('friend', function(steamID, relationship) { if(relationship == Steam.EFriendRelationship.RequestRecipient) { steamFriends.addFriend(steamID); steamFriends.sendMessage(steamID, config.message.welcome.toString()); } });
bot.js
var Steam = require('steam'); var SteamUser = require('steam-user'); var TradeOfferManager = require('steam-tradeoffer-manager'); var SteamTotp = require('steam-totp'); var Steamcommunity = require('steamcommunity'); var SteamWebLogOn = require('steam-weblogon'); var util = require('util'); var UInt64 = require('cuint').UINT64; var client = new SteamUser(); var steamClient = new Steam.SteamClient(); var steamUser = new Steam.SteamUser(steamClient); var steamFriends = new Steam.SteamFriends(steamClient); var steamWebLogOn = new SteamWebLogOn(steamClient, steamUser); var community = new Steamcommunity(); var manager = new TradeOfferManager({ "steam": client, "domain": "example.com", "language": "en" }); var config = require('./config'); var code = SteamTotp.generateAuthCode(config.bot.shared_secret); var logOnOptions = { account_name: config.bot.username, password: config.bot.password, two_factor_code: code } function log(message) { console.log(new Date().toString() + ' - ' + message); steamFriends.sendMessage(config.admin.Oshane, message.toString()); } function steamIdObjectToSteamId64(steamIdObject) { return new UInt64(steamIdObject.accountid, (steamIdObject.universe << 24) | (steamIdObject.type << 20) | (steamIdObject.instance)).toString(); } function Login(logOnOptions) { steamClient.connect(); steamClient.on('connected', function() { log('Connected...'); steamUser.logOn(logOnOptions); }); steamClient.on('logOnResponse', function(logonResp) { if (logonResp.eresult === Steam.EResult.OK) { log('Login Successful!'); steamFriends.setPersonaState(Steam.EPersonaState.Online); steamWebLogOn.webLogOn(function(sessionID, cookies) { manager.setCookies(cookies, function(err) { if(err) { log(err); process.exit(1); return; } }); community.setCookies(cookies); community.startConfirmationChecker(30000, config.bot.identity_secret); if(community.chatState == 0) { community.chatLogon(); } community.on('chatMessage', function(sender, text) { handleChatMessages(sender, text); }); 
}); } else { log(logonResp.eresult); } }); } function handleChatMessages(steamID, message) { steamID = steamIdObjectToSteamId64(steamID); message = message.trim(); var friendList = steamFriends.friends; if(friendList[steamID] && friendList[steamID] == Steam.EFriendRelationship.Friend) { if(message.indexOf('!help') > -1) { steamFriends.sendMessage(steamID, config.message.help.toString()); } else if(message.indexOf('!buy') > -1) { numberOfKeys = parseInt(message.substring(message.indexOf(' ') + 1, message.length)); if(isNaN(numberOfKeys) == true) { steamFriends.sendMessage(steamID, config.message.invalid_number_of_keys.toString()); } else { if(numberOfKeys > config.max_number_of_keys) { steamFriends.sendMessage(steamID, config.message.excess_keys.toString()); } else { sellSets(steamID, Math.round(numberOfKeys)); steamFriends.sendMessage(steamID, config.message.buy.toString()); } } } else { steamFriends.sendMessage(steamID, config.message.invalid_command.toString()); } } else { community.chatMessage(steamID, config.message.not_in_friendlist.toString()); } } var getSpecificItemFromInventoryByTagName = function(inventory, tagName) { var inventoryItems = []; inventory.forEach(function(inventoryItem) { if(inventoryItem.tags) { inventoryItem.tags.forEach(function(tag) { if(tag.name && tag.name == tagName) { inventoryItems.push(inventoryItem); } }); } }); return inventoryItems; } var getSpecificNumberOfItemsFromInventory = function(itemInventory, numberOfItems) { var items = []; for(var i = 0; i < numberOfItems; i++) { if(i < itemInventory.length) { var item = itemInventory[i]; items.push({ assetid: item.assetid, appid: item.appid, contextid: item.contextid, amount: 1}); } } return items; } var getSmallerNumber = function(first, second) { return Math.min(first, second); } function sellSets(steamID, numberOfKeys) { var theirItems = []; var myItems = []; manager.getUserInventoryContents(steamID, config.app_id.csgo, config.context_id.keys, true, function(err, userInventory, 
userCurrencies) { userInventory = getSpecificItemFromInventoryByTagName(userInventory, 'Key'); theirItems = getSpecificNumberOfItemsFromInventory(userInventory, numberOfKeys); if(theirItems.length > 0) { manager.getInventoryContents(config.app_id.steam, config.context_id.cards, true, function(err, inventory, currencies) { numberOfKeys = getSmallerNumber(numberOfKeys, theirItems.length); inventory = getSpecificItemFromInventoryByTagName(inventory, 'Trading Card'); var numberOfCardSets = numberOfKeys * config.sets_per_key; myItems = getSpecificNumberOfItemsFromInventory(inventory, numberOfCardSets); if(myItems.length > 0) { var offer = manager.createOffer(steamID); offer.addMyItems(myItems); offer.addTheirItems(theirItems); offer.setMessage(config.message.tradeoffer.toString()); offer.send(function(err, status) { if(err) { log('Sale of cards failed: ' + err); return; } if(status == 'pending') { community.checkConfirmations(); log('checkConfirmations executed'); } steamFriends.sendMessage(steamID, config.message.cards_sold.toString()); }); } }); } }); } Login(logOnOptions); steamFriends.on('friend', function(steamID, relationship) { if(relationship == Steam.EFriendRelationship.RequestRecipient) { steamFriends.addFriend(steamID); steamFriends.sendMessage(steamID, config.message.welcome.toString()); } });
Replaced parseint with regex (it is faster) Line 98
bot.js
Replaced parseint with regex (it is faster) Line 98
<ide><path>ot.js <ide> } <ide> else if(message.indexOf('!buy') > -1) { <ide> <del> numberOfKeys = parseInt(message.substring(message.indexOf(' ') + 1, message.length)); <add> numberOfKeys = message.replace ( /[^\d.]/g, '' ); <ide> <ide> if(isNaN(numberOfKeys) == true) { steamFriends.sendMessage(steamID, config.message.invalid_number_of_keys.toString()); } <ide> else {
Java
apache-2.0
f848a3930831b8ef961a0023ff680e16a1c920cf
0
amit-jain/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,anchela/jackrabbit-oak,anchela/jackrabbit-oak,mreutegg/jackrabbit-oak,apache/jackrabbit-oak,apache/jackrabbit-oak,apache/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,amit-jain/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,trekawek/jackrabbit-oak,mreutegg/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,apache/jackrabbit-oak,trekawek/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,amit-jain/jackrabbit-oak
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.spi.security; import java.util.Collection; import java.util.Collections; import java.util.Dictionary; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.annotation.Nullable; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.apache.jackrabbit.oak.commons.PropertiesUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * ConfigurationParameters is a convenience class that allows typed access to configuration properties. It implements * the {@link Map} interface but is immutable. 
*/ public final class ConfigurationParameters implements Map<String, Object> { /** * internal logger */ private static final Logger log = LoggerFactory.getLogger(ConfigurationParameters.class); /** * An empty configuration parameters */ public static final ConfigurationParameters EMPTY = new ConfigurationParameters(); /** * internal map of the config parameters */ private final Map<String, Object> options; /** * creates an empty config parameters instance. * Note: the constructor is private to avoid creation of empty maps. */ private ConfigurationParameters() { this.options = Collections.emptyMap(); } /** * Creates an config parameter instance. * Note: the constructor is private to avoid creation of empty maps. * @param options the source options. */ private ConfigurationParameters(@Nonnull Map<String, ?> options) { this.options = Collections.unmodifiableMap(options); } /** * Creates a new configuration parameters instance by merging all {@code params} sequentially. * I.e. property define in subsequent arguments overwrite the ones before. * * @param params source parameters to merge * @return merged configuration parameters or {@link #EMPTY} if all source params were empty. */ @Nonnull public static ConfigurationParameters of(@Nonnull ConfigurationParameters... params) { Map<String, Object> m = new HashMap<String, Object>(); for (ConfigurationParameters cp : params) { if (cp != null) { m.putAll(cp.options); } } return m.isEmpty() ? EMPTY : new ConfigurationParameters(m); } /** * Creates new a configuration parameters instance by copying the given properties. * @param properties source properties * @return configuration parameters or {@link #EMPTY} if the source properties were empty. 
*/ @Nonnull public static ConfigurationParameters of(@Nonnull Properties properties) { if (properties.isEmpty()) { return EMPTY; } Map<String, Object> options = new HashMap<String, Object>(properties.size()); for (Object name : properties.keySet()) { final String key = name.toString(); options.put(key, properties.get(key)); } return new ConfigurationParameters(options); } /** * Creates new a configuration parameters instance by copying the given properties. * @param properties source properties * @return configuration parameters or {@link #EMPTY} if the source properties were empty. */ @Nonnull public static ConfigurationParameters of(@Nonnull Dictionary<String, Object> properties) { if (properties.isEmpty()) { return EMPTY; } Map<String, Object> options = new HashMap<String, Object>(properties.size()); for (Enumeration<String> keys = properties.keys(); keys.hasMoreElements();) { String key = keys.nextElement(); options.put(key, properties.get(key)); } return new ConfigurationParameters(options); } /** * Creates new a configuration parameters instance by copying the given map. * @param map source map * @return configuration parameters or {@link #EMPTY} if the source map was empty. */ @Nonnull public static ConfigurationParameters of(@Nonnull Map<?, ?> map) { if (map.isEmpty()) { return EMPTY; } Map<String, Object> options = new HashMap<String, Object>(map.size()); for (Map.Entry<?,?> e : map.entrySet()) { options.put(String.valueOf(e.getKey()), e.getValue()); } return new ConfigurationParameters(options); } /** * Creates new a single valued configuration parameters instance from the * given key and value. * * @param key The key * @param value The value * @return a new instance of configuration parameters. */ @Nonnull public static ConfigurationParameters of(@Nonnull String key, @Nonnull Object value) { return new ConfigurationParameters(ImmutableMap.of(key, value)); } /** * Creates new a configuration parameters instance from the * given key and value pairs. 
* * @param key1 The key of the first pair. * @param value1 The value of the first pair * @param key2 The key of the second pair. * @param value2 The value of the second pair. * @return a new instance of configuration parameters. */ @Nonnull public static ConfigurationParameters of(@Nonnull String key1, @Nonnull Object value1, @Nonnull String key2, @Nonnull Object value2) { return new ConfigurationParameters(ImmutableMap.of(key1, value1, key2, value2)); } /** * Returns {@code true} if this instance contains a configuration entry with * the specified key irrespective of the defined value; {@code false} otherwise. * * @param key The key to be tested. * @return {@code true} if this instance contains a configuration entry with * the specified key irrespective of the defined value; {@code false} otherwise. */ public boolean contains(@Nonnull String key) { return options.containsKey(key); } /** * Returns the value of the configuration entry with the given {@code key} * applying the following rules: * * <ul> * <li>If this instance doesn't contain a configuration entry with that * key the specified {@code defaultValue} will be returned.</li> * <li>If {@code defaultValue} is {@code null} the original value will * be returned.</li> * <li>If the configured value is {@code null} this method will always * return {@code null}.</li> * <li>If neither {@code defaultValue} nor the configured value is * {@code null} an attempt is made to convert the configured value to * match the type of the default value.</li> * </ul> * * @param key The name of the configuration option. * @param defaultValue The default value to return if no such entry exists * or to use for conversion. * @param targetClass The target class * @return The original or converted configuration value or {@code null}. 
*/ @CheckForNull public <T> T getConfigValue(@Nonnull String key, @Nullable T defaultValue, @Nullable Class<T> targetClass) { if (options.containsKey(key)) { return convert(options.get(key), defaultValue, targetClass); } else { return defaultValue; } } /** * Returns the value of the configuration entry with the given {@code key} * applying the following rules: * * <ul> * <li>If this instance doesn't contain a configuration entry with that * key, or if the entry is {@code null}, the specified {@code defaultValue} will be returned.</li> * <li>If the configured value is not {@code null} an attempt is made to convert the configured value to * match the type of the default value.</li> * </ul> * * @param key The name of the configuration option. * @param defaultValue The default value to return if no such entry exists * or to use for conversion. * @return The original or converted configuration value or {@code null}. */ @Nonnull public <T> T getConfigValue(@Nonnull String key, @Nonnull T defaultValue) { Object property = options.get(key); if (property == null) { return defaultValue; } else { T value = convert(property, defaultValue, null); return (value == null) ? defaultValue : value; } } //--------------------------------------------------------< private >--- @SuppressWarnings("unchecked") @Nullable private static <T> T convert(@Nullable Object configProperty, @Nullable T defaultValue, @Nullable Class<T> targetClass) { if (configProperty == null) { return null; } String str = configProperty.toString(); Class clazz = targetClass; if (clazz == null) { clazz = (defaultValue == null) ? configProperty.getClass() : defaultValue.getClass(); } try { if (clazz.isAssignableFrom(configProperty.getClass())) { return (T) configProperty; } else if (clazz == String.class) { return (T) str; } else if (clazz == Milliseconds.class) { Milliseconds ret = Milliseconds.of(str); return (T) ret == null ? 
defaultValue : (T) ret; } else if (clazz == Integer.class || clazz == int.class) { return (T) Integer.valueOf(str); } else if (clazz == Long.class || clazz == long.class) { return (T) Long.valueOf(str); } else if (clazz == Float.class || clazz == float.class) { return (T) Float.valueOf(str); } else if (clazz == Double.class || clazz == double.class) { return (T) Double.valueOf(str); } else if (clazz == Boolean.class || clazz == boolean.class) { return (T) Boolean.valueOf(str); } else if (clazz == String[].class){ return (T) PropertiesUtil.toStringArray(configProperty, (String[]) defaultValue); } else if (clazz == Set.class || Set.class.isAssignableFrom(clazz)) { if (configProperty instanceof Set) { return (T) configProperty; } else if (configProperty instanceof Collection) { return (T) ImmutableSet.copyOf((Collection) configProperty); } else if (configProperty.getClass().isArray()) { return (T) ImmutableSet.copyOf((Object[]) configProperty); } else { String[] arr = PropertiesUtil.toStringArray(configProperty); if (arr != null) { return (T) ImmutableSet.copyOf(arr); } else { log.warn("Unsupported target type {} for value {}", clazz.getName(), str); throw new IllegalArgumentException("Cannot convert config entry " + str + " to " + clazz.getName()); } } } else { // unsupported target type log.warn("Unsupported target type {} for value {}", clazz.getName(), str); throw new IllegalArgumentException("Cannot convert config entry " + str + " to " + clazz.getName()); } } catch (NumberFormatException e) { log.warn("Invalid value {}; cannot be parsed into {}", str, clazz.getName()); throw new IllegalArgumentException("Cannot convert config entry " + str + " to " + clazz.getName(), e); } } //-----------------------------------------------------------------------------------< Map interface delegation >--- /** * {@inheritDoc} */ @Override public int size() { return options.size(); } /** * {@inheritDoc} */ @Override public boolean isEmpty() { return options.isEmpty(); } /** * 
{@inheritDoc} */ @Override public boolean containsKey(Object key) { return options.containsKey(key); } /** * {@inheritDoc} */ @Override public boolean containsValue(Object value) { return options.containsValue(value); } /** * {@inheritDoc} */ @Override public Object get(Object key) { return options.get(key); } /** * {@inheritDoc} */ @Override public Object put(String key, Object value) { // we rely on the immutability of the delegated map to throw the correct exceptions. return options.put(key, value); } /** * {@inheritDoc} */ @Override public Object remove(Object key) { // we rely on the immutability of the delegated map to throw the correct exceptions. return options.remove(key); } /** * {@inheritDoc} */ @Override public void putAll(Map<? extends String, ?> m) { // we rely on the immutability of the delegated map to throw the correct exceptions. options.putAll(m); } /** * {@inheritDoc} */ @Override public void clear() { // we rely on the immutability of the delegated map to throw the correct exceptions. options.clear(); } /** * {@inheritDoc} */ @Override public Set<String> keySet() { return options.keySet(); } /** * {@inheritDoc} */ @Override public Collection<Object> values() { return options.values(); } /** * {@inheritDoc} */ @Override public Set<Entry<String,Object>> entrySet() { return options.entrySet(); } /** * Helper class for configuration parameters that denote a "duration", such as a timeout or expiration time. */ public static final class Milliseconds { private static final Pattern pattern = Pattern.compile("(\\d+)(\\.\\d+)?(ms|s|m|h|d)?"); public static final Milliseconds NULL = new Milliseconds(0); public static final Milliseconds FOREVER = new Milliseconds(Long.MAX_VALUE); public static final Milliseconds NEVER = new Milliseconds(-1); public final long value; private Milliseconds(long value) { this.value = value; } /** * Returns a new milliseconds object from the given long value. 
* * @param value the value * @return the milliseconds object */ public static Milliseconds of(long value) { if (value == 0) { return NULL; } else if (value == Long.MAX_VALUE) { return FOREVER; } else if (value < 0) { return NEVER; } else { return new Milliseconds(value); } } /** * Parses a value string into a duration. the String has the following format: * <xmp> * format:= (value [ unit ])+; * value:= float value; * unit: "ms" | "s" | "m" | "h" | "d"; * </xmp> * * Example: * <xmp> * "100", "100ms" : 100 milliseconds * "1s 50ms": 1050 milliseconds * "1.5d": 1 1/2 days == 36 hours. * </xmp> * * @param str the string to parse * @return the new Milliseconds object or null. */ @CheckForNull public static Milliseconds of(@Nullable String str) { if (str == null) { return null; } Matcher m = pattern.matcher(str); long current = -1; while (m.find()) { String number = m.group(1); String decimal = m.group(2); if (decimal != null) { number += decimal; } String unit = m.group(3); double value = Double.valueOf(number); if ("s".equals(unit)) { value *= 1000.0; } else if ("m".equals(unit)) { value *= 60 * 1000.0; } else if ("h".equals(unit)) { value *= 60 * 60 * 1000.0; } else if ("d".equals(unit)) { value *= 24 * 60 * 60 * 1000.0; } current += value; } return current < 0 ? null : new Milliseconds(current + 1); } @Override public boolean equals(Object o) { return this == o || !(o == null || getClass() != o.getClass()) && value == ((Milliseconds) o).value; } @Override public int hashCode() { return (int) (value ^ (value >>> 32)); } } }
oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/ConfigurationParameters.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.spi.security; import java.util.Collection; import java.util.Collections; import java.util.Dictionary; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.annotation.Nullable; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.apache.jackrabbit.oak.commons.PropertiesUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * ConfigurationParameters is a convenience class that allows typed access to configuration properties. It implements * the {@link Map} interface but is immutable. 
*/ public final class ConfigurationParameters implements Map<String, Object> { /** * internal logger */ private static final Logger log = LoggerFactory.getLogger(ConfigurationParameters.class); /** * An empty configuration parameters */ public static final ConfigurationParameters EMPTY = new ConfigurationParameters(); /** * internal map of the config parameters */ private final Map<String, Object> options; /** * creates an empty config parameters instance. * Note: the constructor is private to avoid creation of empty maps. */ private ConfigurationParameters() { this.options = Collections.emptyMap(); } /** * Creates an config parameter instance. * Note: the constructor is private to avoid creation of empty maps. * @param options the source options. */ private ConfigurationParameters(@Nonnull Map<String, ?> options) { this.options = Collections.unmodifiableMap(options); } /** * Creates a new configuration parameters instance by merging all {@code params} sequentially. * I.e. property define in subsequent arguments overwrite the ones before. * * @param params source parameters to merge * @return merged configuration parameters or {@link #EMPTY} if all source params were empty. */ @Nonnull public static ConfigurationParameters of(@Nonnull ConfigurationParameters... params) { Map<String, Object> m = new HashMap<String, Object>(); for (ConfigurationParameters cp : params) { if (cp != null) { m.putAll(cp.options); } } return m.isEmpty() ? EMPTY : new ConfigurationParameters(m); } /** * Creates new a configuration parameters instance by copying the given properties. * @param properties source properties * @return configuration parameters or {@link #EMPTY} if the source properties were empty. 
*/ @Nonnull public static ConfigurationParameters of(@Nonnull Properties properties) { if (properties.isEmpty()) { return EMPTY; } Map<String, Object> options = new HashMap<String, Object>(properties.size()); for (Object name : properties.keySet()) { final String key = name.toString(); options.put(key, properties.get(key)); } return new ConfigurationParameters(options); } /** * Creates new a configuration parameters instance by copying the given properties. * @param properties source properties * @return configuration parameters or {@link #EMPTY} if the source properties were empty. */ @Nonnull public static ConfigurationParameters of(@Nonnull Dictionary<String, Object> properties) { if (properties.isEmpty()) { return EMPTY; } Map<String, Object> options = new HashMap<String, Object>(properties.size()); for (Enumeration<String> keys = properties.keys(); keys.hasMoreElements();) { String key = keys.nextElement(); options.put(key, properties.get(key)); } return new ConfigurationParameters(options); } /** * Creates new a configuration parameters instance by copying the given map. * @param map source map * @return configuration parameters or {@link #EMPTY} if the source map was empty. */ @Nonnull public static ConfigurationParameters of(@Nonnull Map<?, ?> map) { if (map.isEmpty()) { return EMPTY; } Map<String, Object> options = new HashMap<String, Object>(map.size()); for (Map.Entry<?,?> e : map.entrySet()) { options.put(String.valueOf(e.getKey()), e.getValue()); } return new ConfigurationParameters(options); } /** * Creates new a single valued configuration parameters instance from the * given key and value. * * @param key The key * @param value The value * @return a new instance of configuration parameters. */ @Nonnull public static ConfigurationParameters of(@Nonnull String key, @Nonnull Object value) { return new ConfigurationParameters(ImmutableMap.of(key, value)); } /** * Creates new a configuration parameters instance from the * given key and value pairs. 
* * @param key1 The key of the first pair. * @param value1 The value of the first pair * @param key2 The key of the second pair. * @param value2 The value of the second pair. * @return a new instance of configuration parameters. */ @Nonnull public static ConfigurationParameters of(@Nonnull String key1, @Nonnull Object value1, @Nonnull String key2, @Nonnull Object value2) { return new ConfigurationParameters(ImmutableMap.of(key1, value1, key2, value2)); } /** * Returns {@code true} if this instance contains a configuration entry with * the specified key irrespective of the defined value; {@code false} otherwise. * * @param key The key to be tested. * @return {@code true} if this instance contains a configuration entry with * the specified key irrespective of the defined value; {@code false} otherwise. */ public boolean contains(@Nonnull String key) { return options.containsKey(key); } /** * Returns the value of the configuration entry with the given {@code key} * applying the following rules: * * <ul> * <li>If this instance doesn't contain a configuration entry with that * key the specified {@code defaultValue} will be returned.</li> * <li>If {@code defaultValue} is {@code null} the original value will * be returned.</li> * <li>If the configured value is {@code null} this method will always * return {@code null}.</li> * <li>If neither {@code defaultValue} nor the configured value is * {@code null} an attempt is made to convert the configured value to * match the type of the default value.</li> * </ul> * * @param key The name of the configuration option. * @param defaultValue The default value to return if no such entry exists * or to use for conversion. * @param targetClass The target class * @return The original or converted configuration value or {@code null}. 
*/ @CheckForNull public <T> T getConfigValue(@Nonnull String key, @Nullable T defaultValue, @Nullable Class<T> targetClass) { if (options.containsKey(key)) { return convert(options.get(key), defaultValue, targetClass); } else { return defaultValue; } } /** * Returns the value of the configuration entry with the given {@code key} * applying the following rules: * * <ul> * <li>If this instance doesn't contain a configuration entry with that * key, or if the entry is {@code null}, the specified {@code defaultValue} will be returned.</li> * <li>If the configured value is not {@code null} an attempt is made to convert the configured value to * match the type of the default value.</li> * </ul> * * @param key The name of the configuration option. * @param defaultValue The default value to return if no such entry exists * or to use for conversion. * @return The original or converted configuration value or {@code null}. */ @Nonnull public <T> T getConfigValue(@Nonnull String key, @Nonnull T defaultValue) { Object property = options.get(key); if (property == null) { return defaultValue; } else { T value = convert(property, defaultValue, null); return (value == null) ? defaultValue : value; } } //--------------------------------------------------------< private >--- @SuppressWarnings("unchecked") @Nullable private static <T> T convert(@Nullable Object configProperty, @Nullable T defaultValue, @Nullable Class<T> targetClass) { if (configProperty == null) { return null; } String str = configProperty.toString(); Class clazz = targetClass; if (clazz == null) { clazz = (defaultValue == null) ? configProperty.getClass() : defaultValue.getClass(); } try { if (clazz.isAssignableFrom(configProperty.getClass())) { return (T) configProperty; } else if (clazz == String.class) { return (T) str; } else if (clazz == Milliseconds.class) { Milliseconds ret = Milliseconds.of(str); return (T) ret == null ? 
defaultValue : (T) ret; } else if (clazz == Integer.class || clazz == int.class) { return (T) Integer.valueOf(str); } else if (clazz == Long.class || clazz == long.class) { return (T) Long.valueOf(str); } else if (clazz == Float.class || clazz == float.class) { return (T) Float.valueOf(str); } else if (clazz == Double.class || clazz == double.class) { return (T) Double.valueOf(str); } else if (clazz == Boolean.class || clazz == boolean.class) { return (T) Boolean.valueOf(str); } else if (clazz == String[].class){ return (T) PropertiesUtil.toStringArray(configProperty, (String[]) defaultValue); } else if (clazz == Set.class || Set.class.isAssignableFrom(clazz)) { if (configProperty instanceof Set) { return (T) configProperty; } else if (configProperty instanceof Collection) { return (T) ImmutableSet.copyOf((Collection) configProperty); } else if (configProperty.getClass().isArray()) { return (T) ImmutableSet.copyOf((Object[]) configProperty); } else { String[] arr = PropertiesUtil.toStringArray(configProperty); if (arr != null) { return (T) ImmutableSet.copyOf(arr); } else { log.warn("Unsupported target type {} for value {}", clazz.getName(), str); throw new IllegalArgumentException("Cannot convert config entry " + str + " to " + clazz.getName()); } } } else { // unsupported target type log.warn("Unsupported target type {} for value {}", clazz.getName(), str); throw new IllegalArgumentException("Cannot convert config entry " + str + " to " + clazz.getName()); } } catch (NumberFormatException e) { log.warn("Invalid value {}; cannot be parsed into {}", str, clazz.getName()); throw new IllegalArgumentException("Cannot convert config entry " + str + " to " + clazz.getName(), e); } } //-----------------------------------------------------------------------------------< Map interface delegation >--- /** * {@inheritDoc} */ @Override public int size() { return options.size(); } /** * {@inheritDoc} */ @Override public boolean isEmpty() { return options.isEmpty(); } /** * 
{@inheritDoc} */ @Override public boolean containsKey(Object key) { return options.containsKey(key); } /** * {@inheritDoc} */ @Override public boolean containsValue(Object value) { return options.containsValue(value); } /** * {@inheritDoc} */ @Override public Object get(Object key) { return options.get(key); } /** * {@inheritDoc} */ @Override public Object put(String key, Object value) { // we rely on the immutability of the delegated map to throw the correct exceptions. return options.put(key, value); } /** * {@inheritDoc} */ @Override public Object remove(Object key) { // we rely on the immutability of the delegated map to throw the correct exceptions. return options.remove(key); } /** * {@inheritDoc} */ @Override public void putAll(Map<? extends String, ?> m) { // we rely on the immutability of the delegated map to throw the correct exceptions. options.putAll(m); } /** * {@inheritDoc} */ @Override public void clear() { // we rely on the immutability of the delegated map to throw the correct exceptions. options.clear(); } /** * {@inheritDoc} */ @Override public Set<String> keySet() { return options.keySet(); } /** * {@inheritDoc} */ @Override public Collection<Object> values() { return options.values(); } /** * {@inheritDoc} */ @Override public Set<Entry<String,Object>> entrySet() { return options.entrySet(); } /** * Helper class for configuration parameters that denote a "duration", such as a timeout or expiration time. */ public static final class Milliseconds { private static final Pattern pattern = Pattern.compile("(\\d+)(\\.\\d+)?(ms|s|m|h|d)?"); public static final Milliseconds NULL = new Milliseconds(0); public static final Milliseconds FOREVER = new Milliseconds(Long.MAX_VALUE); public static final Milliseconds NEVER = new Milliseconds(-1); public final long value; private Milliseconds(long value) { this.value = value; } /** * Returns a new milliseconds object from the given long value. 
* @param value the value * @return the milliseconds object */ public static Milliseconds of(long value) { if (value == 0) { return NULL; } else if (value == Long.MAX_VALUE) { return FOREVER; } else if (value < 0) { return NEVER; } else { return new Milliseconds(value); } } /** * Parses a value string into a duration. the String has the following format: * <xmp> * format:= (value [ unit ])+; * value:= float value; * unit: "ms" | "s" | "m" | "h" | "d"; * </xmp> * * Example: * <xmp> * "100", "100ms" : 100 milliseconds * "1s 50ms": 1050 milliseconds * "1.5d": 1 1/2 days == 36 hours. * </xmp> * * @param str the string to parse * @return the new Milliseconds object or null. */ @CheckForNull public static Milliseconds of(@Nullable String str) { if (str == null) { return null; } Matcher m = pattern.matcher(str); long current = -1; while (m.find()) { String number = m.group(1); String decimal = m.group(2); if (decimal != null) { number+=decimal; } String unit = m.group(3); double value = Double.valueOf(number); if ("s".equals(unit)) { value*= 1000.0; } else if ("m".equals(unit)) { value*= 60*1000.0; } else if ("h".equals(unit)) { value*= 60*60*1000.0; } else if ("d".equals(unit)) { value*= 24*60*60*1000.0; } current += value; } return current < 0 ? null : new Milliseconds(current + 1); } @Override public boolean equals(Object o) { return this == o || !(o == null || getClass() != o.getClass()) && value == ((Milliseconds) o).value; } @Override public int hashCode() { return (int) (value ^ (value >>> 32)); } } }
minor improvement: formatting git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1669120 13f79535-47bb-0310-9956-ffa450edef68
oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/ConfigurationParameters.java
minor improvement: formatting
<ide><path>ak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/ConfigurationParameters.java <ide> <ide> /** <ide> * Returns a new milliseconds object from the given long value. <add> * <ide> * @param value the value <ide> * @return the milliseconds object <ide> */ <ide> String number = m.group(1); <ide> String decimal = m.group(2); <ide> if (decimal != null) { <del> number+=decimal; <add> number += decimal; <ide> } <ide> String unit = m.group(3); <ide> double value = Double.valueOf(number); <ide> if ("s".equals(unit)) { <del> value*= 1000.0; <add> value *= 1000.0; <ide> } else if ("m".equals(unit)) { <del> value*= 60*1000.0; <add> value *= 60 * 1000.0; <ide> } else if ("h".equals(unit)) { <del> value*= 60*60*1000.0; <add> value *= 60 * 60 * 1000.0; <ide> } else if ("d".equals(unit)) { <del> value*= 24*60*60*1000.0; <add> value *= 24 * 60 * 60 * 1000.0; <ide> } <ide> current += value; <ide> }
Java
apache-2.0
e129de0f9fe286502f4ae0f04c7e83b803cfdb95
0
romach/itevents,romach/itevents,JuniorsJava/itevents,JuniorsJava/itevents,JuniorsJava/itevents,romach/itevents,romach/itevents,JuniorsJava/itevents
package org.itevents.controller; import org.itevents.model.Event; import org.itevents.model.Location; import org.itevents.service.EventService; import org.itevents.service.EventServiceImpl; import org.springframework.beans.support.PagedListHolder; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import java.util.ArrayList; import java.util.List; @RestController public class EventRestController { ApplicationContext context = new ClassPathXmlApplicationContext("applicationContext.xml"); private EventService eventService = context.getBean("eventService", EventServiceImpl.class); @RequestMapping(value = "/events/{id}") public ResponseEntity<Event> getEvent(@PathVariable("id") int id) { Event event = eventService.getEvent(id); if (event == null) { return new ResponseEntity(HttpStatus.NOT_FOUND); } return new ResponseEntity<Event>(event, HttpStatus.OK); } /** * REST-method GET that returns list of all events at the location with pagination * * @param page number of page of events' list * @param itemsPerPage number of events placed on the page * @param latitude latitude of the area center * @param longitude longitude of the area center * @param radius radius of the area * @return list of events at the location */ @RequestMapping(method = RequestMethod.GET, value = "/events") public List<Event> getEventsAtLocation(@RequestParam(value = "page") int page, @RequestParam(value = "itemsPerPage") int itemsPerPage, @RequestParam(value = "lat") double latitude, @RequestParam(value = "lon") double longitude, @RequestParam(value = "radius") int radius) { if (itemsPerPage <= 0) { return new ArrayList<>(); } Location location = new Location(latitude, longitude); List<Event> events = eventService.getFutureEventsInRadius(location, radius); int pages = 
events.size()/itemsPerPage; if (events.size() % itemsPerPage != 0) { pages++; } PagedListHolder<Event> paginatedEvents = new PagedListHolder<Event>(events); paginatedEvents.setPageSize(itemsPerPage); if (page <= 0) { return paginatedEvents.getPageList(); } if (page > pages - 1) { paginatedEvents.setPage(pages - 1); return paginatedEvents.getPageList(); } paginatedEvents.setPage(page); return paginatedEvents.getPageList(); } // radius=10&cityId=23&lat=50.434&lon=30.543&payed=true&techTag=java&techTag=javascript @RequestMapping(method = RequestMethod.GET, value = "/events") public List<Event> getEventsAtLocation(@RequestParam(value = "cityId") int cityId, @RequestParam(value = "payed") boolean payed, @RequestParam(value = "lat") double latitude, @RequestParam(value = "lon") double longitude, @RequestParam(value = "radius") int radius, @RequestParam(value = "techTag") String[] techTags) { // todo return null; } }
restservice/src/main/java/org/itevents/controller/EventRestController.java
package org.itevents.controller; import org.itevents.model.Event; import org.itevents.model.Location; import org.itevents.service.EventService; import org.itevents.service.EventServiceImpl; import org.springframework.beans.support.PagedListHolder; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import java.util.ArrayList; import java.util.List; @RestController public class EventRestController { ApplicationContext context = new ClassPathXmlApplicationContext("applicationContext.xml"); private EventService eventService = context.getBean("eventService", EventServiceImpl.class); @RequestMapping(value = "/events/{id}") public ResponseEntity<Event> getEvent(@PathVariable("id") int id) { Event event = eventService.getEvent(id); if (event == null) { return new ResponseEntity(HttpStatus.NOT_FOUND); } return new ResponseEntity<Event>(event, HttpStatus.OK); } /** * REST-method GET that returns list of all events at the location with pagination * * @param page number of page of events' list * @param itemsPerPage number of events placed on the page * @param latitude latitude of the area center * @param longitude longitude of the area center * @param radius radius of the area * @return list of events at the location */ @RequestMapping(method = RequestMethod.GET, value = "/events") public List<Event> getEventsAtLocation(@RequestParam(value = "page") int page, @RequestParam(value = "itemsPerPage") int itemsPerPage, @RequestParam(value = "lat") double latitude, @RequestParam(value = "lon") double longitude, @RequestParam(value = "radius") int radius) { if (itemsPerPage <= 0) { return new ArrayList<>(); } Location location = new Location(latitude, longitude); List<Event> events = eventService.getFutureEventsInRadius(location, radius); int pages = 
events.size()/itemsPerPage; if (events.size() % itemsPerPage != 0) { pages++; } PagedListHolder<Event> paginatedEvents = new PagedListHolder<Event>(events); paginatedEvents.setPageSize(itemsPerPage); if (page <= 0) { return paginatedEvents.getPageList(); } if (page > pages - 1) { paginatedEvents.setPage(pages - 1); return paginatedEvents.getPageList(); } paginatedEvents.setPage(page); return paginatedEvents.getPageList(); } // radius=10&cityId=23&lat=50.434&lon=30.543&payed=true&techTag=java&techTag=javascript @RequestMapping(method = RequestMethod.GET, value = "/events") public List<Event> getEventsAtLocation(@RequestParam(value = "cityId") int cityId, @RequestParam(value = "payed") boolean payed, @RequestParam(value = "lat") double latitude, @RequestParam(value = "lon") double longitude, @RequestParam(value = "radius") int radius) { // todo return null; } }
add RequestParam techTag
restservice/src/main/java/org/itevents/controller/EventRestController.java
add RequestParam techTag
<ide><path>estservice/src/main/java/org/itevents/controller/EventRestController.java <ide> @RequestParam(value = "payed") boolean payed, <ide> @RequestParam(value = "lat") double latitude, <ide> @RequestParam(value = "lon") double longitude, <del> @RequestParam(value = "radius") int radius) { <add> @RequestParam(value = "radius") int radius, <add> @RequestParam(value = "techTag") String[] techTags) { <ide> // todo <ide> <ide> return null;
JavaScript
mit
7efdc67432e2cf925484e6823512a1eee1590b59
0
DemocracyOS/app,DemocracyOS/app
var prefix = '/:forum' function createRouter (config) { var multiForum = config.multiForum return function forumRouter (route) { if (!multiForum) return route || '/' if (!route) return prefix if (route[0] !== '/') route = '/' + route return prefix + route } } module.exports = createRouter
lib/forum-router/create-router.js
var prefix = '/:forum' function createRouter (config) { var multiForum = config.multiForum return function forumRouter (route) { if (!multiForum) return route if (!route) return prefix if (route[0] !== '/') route = '/' + route return prefix + route } } module.exports = createRouter
add default singleforum route
lib/forum-router/create-router.js
add default singleforum route
<ide><path>ib/forum-router/create-router.js <ide> function createRouter (config) { <ide> var multiForum = config.multiForum <ide> return function forumRouter (route) { <del> if (!multiForum) return route <add> if (!multiForum) return route || '/' <ide> if (!route) return prefix <ide> if (route[0] !== '/') route = '/' + route <ide> return prefix + route
JavaScript
mit
48ca03325cfdc098246e70cc19b33a75a74a6cf1
0
ello/webapp,ello/webapp,ello/webapp
import { Map, OrderedSet, List } from 'immutable' import { createSelector } from 'reselect' import get from 'lodash/get' import startCase from 'lodash/startCase' import trunc from 'trunc-html' import { CATEGORIES } from '../constants/mapping_types' import { META } from '../constants/locales/en' import { selectParamsType } from './params' import { selectPathname } from './routing' import { selectSubscribedCategoryIds } from './profile' import { selectIsLoggedIn } from './authentication' export const selectPropsCategoryId = (state, props) => get(props, 'categoryId') // state.json.categories.xxx export const selectCategoryCollection = state => state.json.get(CATEGORIES) // Requires `categoryId` to be found in props export const selectCategory = createSelector( [selectPropsCategoryId, selectCategoryCollection], (id, categories) => categories.get(id, Map()), ) export const selectCategoryName = createSelector([selectCategory], category => category.get('name')) export const selectCategorySlug = createSelector([selectCategory], category => category.get('slug')) export const selectCategoryTileImageUrl = createSelector([selectCategory], category => category.getIn(['tileImage', 'large', 'url'])) export const selectCategoryIsSubscribed = createSelector( [selectCategory, selectIsLoggedIn, selectSubscribedCategoryIds], (category, isLoggedIn, subscribedIds) => ( category && isLoggedIn && subscribedIds.includes(category.get('id')))) export const selectCategoryIsPromo = createSelector([selectCategory], category => category.get('level') === 'promo') export const selectAllCategoriesAsArray = createSelector([selectCategoryCollection], categories => (categories || Map()).valueSeq()) const levelEnum = { promoted: 10, primary: 20, secondary: 30, tertiary: 40, } function sortCategoriesByLevelAndOrder(a, b) { const levelA = levelEnum[a.get('level')] const levelB = levelEnum[b.get('level')] const orderA = a.get('order') const orderB = b.get('order') if (levelA > levelB) { return 1 } else if 
(levelB > levelA) { return -1 } else if (orderA > orderB) { return 1 } else if (orderB > orderA) { return -1 } return 0 } export const selectOrderedCategories = createSelector( [selectAllCategoriesAsArray], categories => categories.sort(sortCategoriesByLevelAndOrder)) export const selectOrderedCategoryIds = createSelector( [selectOrderedCategories], cats => cats.map(cat => cat.get('id'))) export const selectOnboardingCategoriesFiltered = createSelector( [selectOrderedCategories], categories => categories.filter(category => category.get('allowInOnboarding')).toArray()) export const selectCreatorTypeCategories = createSelector( [selectOrderedCategories], categories => categories.filter(category => category.get('isCreatorType')).toArray()) export const selectCategoryTabs = createSelector( [selectCategoryCollection, selectIsLoggedIn, selectSubscribedCategoryIds], (categories, isLoggedIn, subscribedIds) => { if (!categories) { return [] } const promoIds = OrderedSet(categories.filter(cat => cat.get('level') === 'promo').keySeq()) let navIds = promoIds if (isLoggedIn) { navIds = navIds.concat(subscribedIds) } else { const primaryIds = categories.filter(cat => cat.get('level') === 'primary').keySeq() navIds = navIds.concat(primaryIds) } return navIds.reduce((ids, id) => { const label = categories.getIn([id, 'name']) const slug = categories.getIn([id, 'slug']) const categoryLevel = categories.getIn([id, 'level']) if (!slug || !label) { return ids } return ids.push({ label, to: `/discover/${slug}`, promo: (categoryLevel === 'promo'), sources: { small: categories.getIn([id, 'tileImage', 'small', 'url']), large: categories.getIn([id, 'tileImage', 'large', 'url']), }, }) }, List()).toArray() }, ) // determine if any non-promo categories are subscribed to export const selectAreCategoriesSubscribed = createSelector( [selectSubscribedCategoryIds], (subscribedIds) => { if (subscribedIds && subscribedIds.size > 0) { return true } return false }) export const selectCategoryPageTitle 
= createSelector( [selectParamsType, selectCategoryCollection], (paramsType, categories) => { switch (paramsType) { case 'all': return null case undefined: case 'recommended': return 'Featured' default: { const cat = categories && categories.find(c => c.get('slug') === paramsType) return cat ? cat.get('name') : startCase(paramsType).replace(/\sX\s/, ' x ') } } }, ) export const selectCategoryForPath = createSelector( [selectPathname, selectAllCategoriesAsArray], (pathname, categories) => { const slug = pathname.replace('/discover/', '') return categories.find(category => category.get('slug') === slug) || Map() }, ) export const selectDiscoverMetaData = createSelector( [selectParamsType, selectCategoryForPath, selectCategoryPageTitle], (type, category, pageTitle) => { const titlePrefix = pageTitle ? `${pageTitle} | ` : '' const title = `${titlePrefix}Ello` const image = category.getIn(['tileImage', 'large', 'url'], META.IMAGE) let description = '' switch (type) { case undefined: case 'featured': case 'recommended': description = META.FEATURED_PAGE_DESCRIPTION break case 'recent': description = META.RECENT_PAGE_DESCRIPTION break case 'trending': description = META.TRENDING_PAGE_DESCRIPTION break case 'all': description = META.ALL_PAGE_DESCRIPTION break default: { description = category && category.get('description') ? trunc(category.get('description'), 160).text : META.DESCRIPTION break } } return { description, image, title } }, )
src/selectors/categories.js
import { Map, OrderedSet, List } from 'immutable' import { createSelector } from 'reselect' import get from 'lodash/get' import startCase from 'lodash/startCase' import trunc from 'trunc-html' import { CATEGORIES } from '../constants/mapping_types' import { META } from '../constants/locales/en' import { selectParamsType } from './params' import { selectPathname } from './routing' import { selectSubscribedCategoryIds } from './profile' import { selectIsLoggedIn } from './authentication' export const selectPropsCategoryId = (state, props) => get(props, 'categoryId') // state.json.categories.xxx export const selectCategoryCollection = state => state.json.get(CATEGORIES) // Requires `categoryId` to be found in props export const selectCategory = createSelector( [selectPropsCategoryId, selectCategoryCollection], (id, categories) => categories.get(id, Map()), ) export const selectCategoryName = createSelector([selectCategory], category => category.get('name')) export const selectCategorySlug = createSelector([selectCategory], category => category.get('slug')) export const selectCategoryTileImageUrl = createSelector([selectCategory], category => category.getIn(['tileImage', 'large', 'url'])) export const selectCategoryIsSubscribed = createSelector( [selectCategory, selectIsLoggedIn, selectSubscribedCategoryIds], (category, isLoggedIn, subscribedIds) => ( category && isLoggedIn && subscribedIds.includes(category.get('id')))) export const selectCategoryIsPromo = createSelector([selectCategory], category => category.get('level') === 'promo') export const selectAllCategoriesAsArray = createSelector([selectCategoryCollection], categories => (categories || Map()).valueSeq()) const levelEnum = { promoted: 10, primary: 20, secondary: 30, tertiary: 40, } function sortCategoriesByLevelAndOrder(a, b) { const levelA = levelEnum[a.get('level')] const levelB = levelEnum[b.get('level')] const orderA = a.get('order') const orderB = b.get('order') if (levelA > levelB) { return 1 } else if 
(levelB > levelA) { return -1 } else if (orderA > orderB) { return 1 } else if (orderB > orderA) { return -1 } return 0 } export const selectOrderedCategories = createSelector( [selectAllCategoriesAsArray], categories => categories.sort(sortCategoriesByLevelAndOrder)) export const selectOrderedCategoryIds = createSelector( [selectOrderedCategories], cats => cats.map(cat => cat.get('id'))) export const selectOnboardingCategoriesFiltered = createSelector( [selectOrderedCategories], categories => categories.filter(category => category.get('allowInOnboarding')).toArray()) export const selectCreatorTypeCategories = createSelector( [selectOrderedCategories], categories => categories.filter(category => category.get('isCreatorType')).toArray()) export const selectCategoryTabs = createSelector( [selectCategoryCollection, selectIsLoggedIn, selectSubscribedCategoryIds], (categories, isLoggedIn, subscribedIds) => { if (!categories) { return [] } const promoIds = OrderedSet(categories.filter(cat => cat.get('level') === 'promo').keySeq()) let navIds = promoIds if (isLoggedIn) { navIds = navIds.concat(subscribedIds) } else { const primaryIds = categories.filter(cat => cat.get('level') === 'primary').keySeq() navIds = navIds.concat(primaryIds) } return navIds.reduce((ids, id) => { const label = categories.getIn([id, 'name']) const slug = categories.getIn([id, 'slug']) const categoryLevel = categories.getIn([id, 'level']) if (!slug || !label) { return ids } return ids.push({ label, to: `/discover/${slug}`, promo: (categoryLevel === 'promo'), sources: { small: categories.getIn([id, 'tileImage', 'small', 'url']), large: categories.getIn([id, 'tileImage', 'large', 'url']), }, }) }, List()).toArray() }, ) // determine if any non-promo categories are subscribed to export const selectAreCategoriesSubscribed = createSelector( [selectSubscribedCategoryIds], (subscribedIds) => { if (subscribedIds.size > 0) { return true } return false }) export const selectCategoryPageTitle = 
createSelector( [selectParamsType, selectCategoryCollection], (paramsType, categories) => { switch (paramsType) { case 'all': return null case undefined: case 'recommended': return 'Featured' default: { const cat = categories && categories.find(c => c.get('slug') === paramsType) return cat ? cat.get('name') : startCase(paramsType).replace(/\sX\s/, ' x ') } } }, ) export const selectCategoryForPath = createSelector( [selectPathname, selectAllCategoriesAsArray], (pathname, categories) => { const slug = pathname.replace('/discover/', '') return categories.find(category => category.get('slug') === slug) || Map() }, ) export const selectDiscoverMetaData = createSelector( [selectParamsType, selectCategoryForPath, selectCategoryPageTitle], (type, category, pageTitle) => { const titlePrefix = pageTitle ? `${pageTitle} | ` : '' const title = `${titlePrefix}Ello` const image = category.getIn(['tileImage', 'large', 'url'], META.IMAGE) let description = '' switch (type) { case undefined: case 'featured': case 'recommended': description = META.FEATURED_PAGE_DESCRIPTION break case 'recent': description = META.RECENT_PAGE_DESCRIPTION break case 'trending': description = META.TRENDING_PAGE_DESCRIPTION break case 'all': description = META.ALL_PAGE_DESCRIPTION break default: { description = category && category.get('description') ? trunc(category.get('description'), 160).text : META.DESCRIPTION break } } return { description, image, title } }, )
Check for subscribedIds before checking size.
src/selectors/categories.js
Check for subscribedIds before checking size.
<ide><path>rc/selectors/categories.js <ide> export const selectAreCategoriesSubscribed = createSelector( <ide> [selectSubscribedCategoryIds], <ide> (subscribedIds) => { <del> if (subscribedIds.size > 0) { return true } <add> if (subscribedIds && subscribedIds.size > 0) { return true } <ide> return false <ide> }) <ide>
Java
apache-2.0
f4d227e888d7d6f11d6800abcd8e1d22e6af62e0
0
folio-org/okapi,folio-org/okapi
package okapi.util; import org.folio.okapi.util.LockedStringMap; import io.vertx.core.Vertx; import io.vertx.core.logging.Logger; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; import org.folio.okapi.common.ErrorType; import org.folio.okapi.common.OkapiLogger; import org.junit.After; import org.junit.Test; import static org.junit.Assert.*; import org.junit.Before; import org.junit.runner.RunWith; /** * * @author heikki */ @RunWith(VertxUnitRunner.class) public class LockedStringMapTest { private final Logger logger = OkapiLogger.get(); private Vertx vertx; private Async async; private LockedStringMap map = new LockedStringMap(); @Before public void setUp(TestContext context) { logger.debug("starting LockedStringMapTest"); vertx = Vertx.vertx(); } @After public void tearDown(TestContext context) { async = context.async(); vertx.close(x -> { async.complete(); }); } @Test public void testit(TestContext context) { async = context.async(); map.init(vertx, "FooMap", res -> { listEmpty(context); }); } public void listEmpty(TestContext context) { map.getKeys(res -> { assertTrue(res.succeeded()); assertTrue("[]".equals(res.result().toString())); testadd(context); }); } public void testadd(TestContext context) { map.addOrReplace(false, "k1", "k2", "FOOBAR", res -> { assertTrue(res.succeeded()); testReplaceTrue(context); }); } public void testReplaceTrue(TestContext context) { map.addOrReplace(true, "k1", "k2", "FOOBAR", res -> { assertTrue(res.succeeded()); testReplaceFalse(context); }); } public void testReplaceFalse(TestContext context) { map.addOrReplace(false, "k1", "k2", "FOOBAR", res -> { assertTrue(res.failed()); testgetK12(context); }); } private void testgetK12(TestContext context) { map.getString("k1", "k2", res -> { assertTrue(res.succeeded()); assertEquals("FOOBAR", res.result()); testgetK13(context); }); } private void testgetK13(TestContext context) { map.getString("k1", "k3", res -> { 
assertTrue(res.failed()); assertEquals(ErrorType.NOT_FOUND, res.getType()); testgetK14(context); }); } private void testgetK14(TestContext context) { map.getString("foo", "bar", res -> { assertTrue(res.failed()); assertEquals(ErrorType.NOT_FOUND, res.getType()); testgetK1(context); }); } private void testgetK1(TestContext context) { map.getString("k1", res -> { assertTrue(res.succeeded()); assertEquals("[FOOBAR]", res.result().toString()); addAnother(context); }); } public void addAnother(TestContext context) { map.addOrReplace(false, "k1", "k2.2", "SecondFoo", res -> { assertTrue(res.succeeded()); addSecondK1(context); }); } public void addSecondK1(TestContext context) { map.addOrReplace(false, "k1.1", "x", "SecondKey", res -> { assertTrue(res.succeeded()); testgetK1Again(context); }); } private void testgetK1Again(TestContext context) { map.getString("k1", res -> { assertTrue(res.succeeded()); assertEquals("[FOOBAR, SecondFoo]", res.result().toString()); listKeys(context); }); } public void listKeys(TestContext context) { map.getKeys(res -> { assertTrue(res.succeeded()); assertTrue("[k1, k1.1]".equals(res.result().toString())); deleteKey1(context); }); } private void deleteKey1(TestContext context) { map.remove("k1", "k2", res -> { assertTrue(res.succeeded()); assertFalse(res.result()); // there is still k1/k2.2 left deleteKey1again(context); }); } private void deleteKey1again(TestContext context) { map.remove("k1", "k2", res -> { assertTrue(res.failed()); listKeys1(context); }); } private void listKeys1(TestContext context) { map.getKeys(res -> { assertTrue(res.succeeded()); assertTrue("[k1, k1.1]".equals(res.result().toString())); deleteKey2(context); }); } private void deleteKey2(TestContext context) { map.remove("k1", "k2.2", res -> { assertTrue(res.succeeded()); assertTrue(res.result()); // no keys left testgek1Lower(context); }); } private void testgek1Lower(TestContext context) { map.getString("k1", res -> { assertTrue(res.failed()); 
assertEquals(ErrorType.NOT_FOUND, res.getType()); listKeys2(context); }); } private void listKeys2(TestContext context) { map.getKeys(res -> { assertTrue(res.succeeded()); assertTrue("[k1.1]".equals(res.result().toString())); done(context); }); } private void done(TestContext context) { async.complete(); } }
okapi-core/src/test/java/okapi/util/LockedStringMapTest.java
package okapi.util; import org.folio.okapi.util.LockedStringMap; import io.vertx.core.Vertx; import io.vertx.core.logging.Logger; import io.vertx.ext.unit.Async; import io.vertx.ext.unit.TestContext; import io.vertx.ext.unit.junit.VertxUnitRunner; import org.folio.okapi.common.ErrorType; import org.folio.okapi.common.OkapiLogger; import org.junit.After; import org.junit.Test; import static org.junit.Assert.*; import org.junit.Before; import org.junit.runner.RunWith; /** * * @author heikki */ @RunWith(VertxUnitRunner.class) public class LockedStringMapTest { private final Logger logger = OkapiLogger.get(); private Vertx vertx; private Async async; private LockedStringMap map = new LockedStringMap(); @Before public void setUp(TestContext context) { logger.debug("starting LockedStringMapTest"); vertx = Vertx.vertx(); } @After public void tearDown(TestContext context) { async = context.async(); vertx.close(x -> { async.complete(); }); } @Test public void testit(TestContext context) { async = context.async(); map.init(vertx, "FooMap", res -> { listEmpty(context); }); } public void listEmpty(TestContext context) { map.getKeys(res -> { assertTrue(res.succeeded()); assertTrue("[]".equals(res.result().toString())); testadd(context); }); } public void testadd(TestContext context) { map.addOrReplace(false, "k1", "k2", "FOOBAR", res -> { assertTrue(res.succeeded()); testgetK12(context); }); } private void testgetK12(TestContext context) { map.getString("k1", "k2", res -> { assertTrue(res.succeeded()); assertEquals("FOOBAR", res.result()); testgetK13(context); }); } private void testgetK13(TestContext context) { map.getString("k1", "k3", res -> { assertTrue(res.failed()); assertEquals(ErrorType.NOT_FOUND, res.getType()); testgetK14(context); }); } private void testgetK14(TestContext context) { map.getString("foo", "bar", res -> { assertTrue(res.failed()); assertEquals(ErrorType.NOT_FOUND, res.getType()); testgetK1(context); }); } private void testgetK1(TestContext context) { 
map.getString("k1", res -> { assertTrue(res.succeeded()); assertEquals("[FOOBAR]", res.result().toString()); addAnother(context); }); } public void addAnother(TestContext context) { map.addOrReplace(false, "k1", "k2.2", "SecondFoo", res -> { assertTrue(res.succeeded()); addSecondK1(context); }); } public void addSecondK1(TestContext context) { map.addOrReplace(false, "k1.1", "x", "SecondKey", res -> { assertTrue(res.succeeded()); testgetK1Again(context); }); } private void testgetK1Again(TestContext context) { map.getString("k1", res -> { assertTrue(res.succeeded()); assertEquals("[FOOBAR, SecondFoo]", res.result().toString()); listKeys(context); }); } public void listKeys(TestContext context) { map.getKeys(res -> { assertTrue(res.succeeded()); assertTrue("[k1, k1.1]".equals(res.result().toString())); deleteKey1(context); }); } private void deleteKey1(TestContext context) { map.remove("k1", "k2", res -> { assertTrue(res.succeeded()); assertFalse(res.result()); // there is still k1/k2.2 left listKeys1(context); }); } private void listKeys1(TestContext context) { map.getKeys(res -> { assertTrue(res.succeeded()); assertTrue("[k1, k1.1]".equals(res.result().toString())); deleteKey2(context); }); } private void deleteKey2(TestContext context) { map.remove("k1", "k2.2", res -> { assertTrue(res.succeeded()); assertTrue(res.result()); // no keys left testgek1Lower(context); }); } private void testgek1Lower(TestContext context) { map.getString("k1", res -> { assertTrue(res.failed()); assertEquals(ErrorType.NOT_FOUND, res.getType()); listKeys2(context); }); } private void listKeys2(TestContext context) { map.getKeys(res -> { assertTrue(res.succeeded()); assertTrue("[k1.1]".equals(res.result().toString())); done(context); }); } private void done(TestContext context) { async.complete(); } }
Further test of LockedStringMap
okapi-core/src/test/java/okapi/util/LockedStringMapTest.java
Further test of LockedStringMap
<ide><path>kapi-core/src/test/java/okapi/util/LockedStringMapTest.java <ide> public void testadd(TestContext context) { <ide> map.addOrReplace(false, "k1", "k2", "FOOBAR", res -> { <ide> assertTrue(res.succeeded()); <add> testReplaceTrue(context); <add> }); <add> } <add> <add> public void testReplaceTrue(TestContext context) { <add> map.addOrReplace(true, "k1", "k2", "FOOBAR", res -> { <add> assertTrue(res.succeeded()); <add> testReplaceFalse(context); <add> }); <add> } <add> <add> public void testReplaceFalse(TestContext context) { <add> map.addOrReplace(false, "k1", "k2", "FOOBAR", res -> { <add> assertTrue(res.failed()); <ide> testgetK12(context); <ide> }); <ide> } <ide> map.remove("k1", "k2", res -> { <ide> assertTrue(res.succeeded()); <ide> assertFalse(res.result()); // there is still k1/k2.2 left <add> deleteKey1again(context); <add> }); <add> } <add> <add> private void deleteKey1again(TestContext context) { <add> map.remove("k1", "k2", res -> { <add> assertTrue(res.failed()); <ide> listKeys1(context); <ide> }); <ide> }
Java
mit
b04f01a8487013075e4579e5ce98f29c6c21e05f
0
plum-umd/java-sketch,plum-umd/java-sketch,plum-umd/java-sketch,plum-umd/java-sketch
/** * * Generally speaking, suffix arrays are used to do multiple queries * efficiently on one piece of data rather than to do one operation * then move on to another piece of text. * * Good suffix array read: http://www.cs.yale.edu/homes/aspnes/pinewiki/SuffixArrays.html * * @author William Fiset, [email protected] **/ // MODEL SYNTHESIS // clone: 338318 (using testLRS, default params) // lrs: // AXIOM SYNTHESIS // clone: 32605 (using testLRS, default params) // lrs: import java.util.*; class SuffixArray { // Size of the suffix array int N; // T is the text int[] T; // Suffix array. Contains the indexes of sorted suffixes. int[] sa; // Contains Longest Common Prefix (LCP) count between adjacent suffixes. // lcp[i] = longestCommonPrefixLength( suffixes[i], suffixes[i+1] ). // Also, LCP[len-1] = 0 int [] lcp; // CHANGE public int[] clone(int[] arr) { // int l = {| arr.length, N |}; int l = arr.length; int[] arr_cp = new int[l]; // for(int i=??; i<l; i++) { for(int i=0; i<l; i++) { arr_cp[i] = arr[i]; } return arr_cp; } public SuffixArray(String text) { this(toIntArray(text)); } //CHANGE private static String intArrToString(int [] text) { char[] tmp = new char[text.length]; for (int i=0; i<text.length; i++) { tmp[i] = (char) text[i]; } // Extract part of the suffix we need to compare return new String(tmp, 0, text.length); } private static int[] toIntArray(String s) { int[] text = new int[s.length()]; for(int i=0;i<s.length();i++)text[i] = s.charAt(i); return text; } public SuffixArray(int[] text) { // CHANGE // if (text == null) throw new IllegalArgumentException(); // T = text.clone(); T = clone(text); N = text.length; construct(); kasai(); } // Construct a suffix array in O(nlog^2(n)) public void construct() { sa = new int[N]; // Maintain suffix ranks in both a matrix with two rows containing the // current and last rank information as well as some sortable rank objects // CHANGE // int[][] suffixRanks = new int[2][N]; TwoDArray suffixRanks = new TwoDArray(2, N); 
SuffixRankTuple[] ranks = new SuffixRankTuple[N]; // Assign a numerical value to each character in the text for (int i = 0; i < N; i++) { // CHANGE // suffixRanks[0][i] = T[i]; suffixRanks.set(0, i, T[i]); ranks[i] = new SuffixRankTuple(); } // O(logn) for(int pos = 1; pos < N; pos *= 2) { for(int i = 0; i < N; i++) { SuffixRankTuple suffixRank = ranks[i]; suffixRank.firstHalf = suffixRanks.get(0, i); // CHANGE // suffixRank.firstHalf = suffixRanks[0][i]; suffixRank.secondHalf = i+pos < N ? suffixRanks.get(0, i+pos) : -1; // CHANGE // suffixRank.secondHalf = i+pos < N ? suffixRanks[0][i+pos] : -1; suffixRank.originalIndex = i; } // O(nlogn) // CHANGE // java.util.Arrays.sort(ranks); ranks = Arrays.sort(ranks, ranks.length); int newRank = 0; suffixRanks.set(1, ranks[0].originalIndex, 0); // CHANGE // suffixRanks[1][ranks[0].originalIndex] = 0; for (int i = 1; i < N; i++ ) { SuffixRankTuple lastSuffixRank = ranks[i-1]; SuffixRankTuple currSuffixRank = ranks[i]; // If the first half differs from the second half if (currSuffixRank.firstHalf != lastSuffixRank.firstHalf || currSuffixRank.secondHalf != lastSuffixRank.secondHalf) newRank++; suffixRanks.set(1, currSuffixRank.originalIndex, newRank); // CHANGE // suffixRanks[1][currSuffixRank.originalIndex] = newRank; } // Place top row (current row) to be the last row suffixRanks.setRow(0, suffixRanks.getRow(1)); // CHANGE // suffixRanks[0] = suffixRanks[1]; // Optimization to stop early // CHANGE // if (newRank == N-1) break; if (newRank == N-1) pos = N; } // Fill suffix array for (int i = 0; i < N; i++) { sa[i] = ranks[i].originalIndex; ranks[i] = null; } // Cleanup suffixRanks = null; // CHANGE // suffixRanks[0] = suffixRanks[1] = null; suffixRanks = null; ranks = null; } // Constructs the LCP (longest common prefix) array in linear time - O(n) // http://www.mi.fu-berlin.de/wiki/pub/ABI/RnaSeqP4/suffix-array.pdf private void kasai() { lcp = new int[N]; // Compute inverse index values int [] inv = new int[N]; for (int i = 
0; i < N; i++) inv[sa[i]] = i; // Current lcp length int len = 0; for (int i = 0; i < N; i++) { if (inv[i] > 0) { // Get the index of where the suffix below is int k = sa[inv[i]-1]; // Compute lcp length. For most loops this is O(1) while( (i + len < N) && (k + len < N) && T[i+len] == T[k+len] ) len++; lcp[inv[i]-1] = len; if (len > 0) len--; } } } // // Runs on O(mlog(n)) where m is the length of the substring // // and n is the length of the text. // // NOTE: This is the naive implementation. There exists an // // implementation which runs in O(m + log(n)) time // public boolean contains(String substr) { // if (substr == null) return false; // if (substr.equals("")) return true; // String suffix_str; // int lo = 0, hi = N - 1; // int substr_len = substr.length(); // while( lo <= hi ) { // int mid = (lo + hi) / 2; // int suffix_index = sa[mid]; // int suffix_len = N - suffix_index; // // CHANGE // char[] tmp = new char[T.length]; // for (int i=0; i<T.length; i++) { // tmp[i] = (char) T[i]; // } // // Extract part of the suffix we need to compare // if (suffix_len <= substr_len) suffix_str = new String(tmp, suffix_index, suffix_len); // else suffix_str = new String(tmp, suffix_index, substr_len); // // CHANGE // // if (suffix_len <= substr_len) suffix_str = new String(T, suffix_index, suffix_len); // // else suffix_str = new String(T, suffix_index, substr_len); // int cmp = suffix_str.compareTo(substr); // // Found a match // if ( cmp == 0 ) { // // To find the first occurrence linear scan up/down // // from here or keep doing binary search // return true; // // Substring is found above // } else if (cmp < 0) { // lo = mid + 1; // // Substring is found below // } else { // hi = mid - 1; // } // } // return false; // } // generator public void forLoop() { // // for (int i = 0; i < blah; i++) // int t = ??; // int i = ??; // boolean b1 = i < ??; // for (int i = ??; i {| |} blah; i=i+??){ // } // } public TreeSet <String> lrs() { int[] localInts = new int[10]; 
Object[] localObjs = new Object[10]; return lrsGen(localInts, localObjs); } generator public int genInt(int[] localInts, Object[] localObjs, int i) { int local = localInts[0]; int i1 = lcp[??]; int i2 = lcp[i]; int i3 = lcp[local]; int i4 = T[??]; int i5 = T[local]; int i6 = T[i]; int i7 = sa[??]; int i8 = sa[i]; int i9 = sa[local]; int sz = 0; if (??) { TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; sz = lrss.size(); } int p = {| i, i1, i2, i3, i4, i5, i6, i7, i8, i9, local, ??, T.length, N, sz |}; return p; } public void initVars(int[] localInts, int numLocalInts, Object[] localObjs, int numLocalObjs) { // int t = ??; // if (numLocalInts < 10) { // if (??) { // localInts[numLocalInts] = ??; // numLocalInts++; // } // } // if (numLocalObjs < 10 && ??) { // localObjs[numLocalObjs] = new TreeSet<>(); // numLocalObjs++; // } // if (numLocalObjs < 10 && ??) { // localObjs[numLocalObjs] = new Object(); // numLocalObjs++; // } // if (numLocalObjs < 10 && ??) { // localObjs[numLocalObjs] = new String(); // numLocalObjs++; // } localInts[0] = 0; localObjs[0] = new TreeSet<>(); } public TreeSet<String> lrsGen(int[] localInts, Object[] localObjs) { char[] tmp; if (??) { initVars(localInts, 0, localObjs, 0); } if (??) { tmp = new char[T.length]; int g1 = genInt(localInts, localObjs, 0); for (int i=0; {| i == g1 | i < g1 | i <= g1 | i > g1 | i >= g1 |}; i++) { char r = (char) genInt(localInts, localObjs, i); if (??) { tmp[??] = r; } if (??) { tmp[i] = r; } } } if (??) 
{ int g1 = genInt(localInts, localObjs, 0); for (int i=0; {| i == g1 | i < g1 | i <= g1 | i > g1 | i >= g1 |}; i++) { boolean comp1 = genGuard(localInts, localObjs, i, tmp); boolean comp2 = genGuard(localInts, localObjs, i, tmp); if (comp1) { if (comp2) { // TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; // lrss.clear(); genStmts(localInts, localObjs, i, tmp); } genStmts(localInts, localObjs, i, tmp); // TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; // localInts[0] = lcp[i]; // lrss.add(new String(tmp, sa[i], localInts[0])); } } } if (??) { return (TreeSet<String>) localObjs[0]; } return null; } generator public boolean genGuard(int[] localInts, Object[] localObjs, int i, char[] tmp) { int i1 = genInt(localInts, localObjs, i); int i2 = genInt(localInts, localObjs, i); boolean contains = true; if (??) { TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; int index1 = genInt(localInts, localObjs, i); int index2 = genInt(localInts, localObjs, i); contains = lrss.contains(new String(tmp, index1, index2)); } return {| i1 == i2, i1 < i2, i1 <= i2, contains |}; } generator public void genStmt(int[] localInts, Object[] localObjs, int i, char[] tmp) { if (??) { TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; lrss.clear(); } if (??) { int i1 = genInt(localInts, localObjs, i); localInts[0] = i1; } if (??) { TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; int index1 = genInt(localInts, localObjs, i); int index2 = genInt(localInts, localObjs, i); lrss.add(new String(tmp, index1, index2)); } } generator public void genStmts(int[] localInts, Object[] localObjs, int i, char[] tmp) { if (??) { genStmt(localInts, localObjs, i, tmp); } if (??) { genStmts(localInts, localObjs, i, tmp); } } // Finds the LRS(s) (Longest Repeated Substring) that occurs in a string. // Traditionally we are only interested in substrings that appear at // least twice, so this method returns an empty set if this is the case. 
// @return an ordered set of longest repeated substrings public TreeSet <String> lrs2() { int[] localInts = new int[10]; Object[] localObjs = new Object[10]; initVars(localInts, 0, localObjs, 0); // int max_len = 0; // TreeSet <String> lrss = new TreeSet<>(); int max_len = localInts[0]; TreeSet <String> lrss = localObjs[0]; char[] tmp = new char[T.length]; // CHANGE // int g1 = {| T.length, N, max_len, ?? |}; // for (int i=??; i<g1; i=i+??) { for (int i=0; i<T.length; i++) { tmp[i] = (char) T[i]; } // int g2 = {| T.length, N, max_len, ?? |}; // for (int i = ??; i < g2; i=i+??) { for (int i = 0; i < N; i++) { // if (lcp[i] > ?? && lcp[i] >= max_len) { if (lcp[i] > 0 && lcp[i] >= max_len) { // We found a longer LRS if ( lcp[i] > max_len ) { lrss.clear(); } // Append substring to the list and update max max_len = lcp[i]; // CHANGE lrss.add( new String(tmp, sa[i], max_len) ); // lrss.add( new String(T, sa[i], max_len) ); } } return lrss; } // // /** // // * Finds the Longest Common Substring (LCS) between a group of strings. // // * The current implementation takes O(nlog(n)) bounded by the suffix array construction. // // * @param strs - The strings you wish to find the longest common substring between // // * @param K - The minimum number of strings to find the LCS between. K must be at least 2. 
// // **/ // public static TreeSet<String> lcs(String [] strs, int K) { // // CHANGE // // if (K <= 1) throw new IllegalArgumentException("K must be greater than or equal to 2!"); // if (K <= 1) { // return null; // } // TreeSet<String> lcss = new TreeSet(); // if (strs == null || strs.length <= 1) return lcss; // // L is the concatenated length of all the strings and the sentinels // int L = 0; // final int NUM_SENTINELS = strs.length, N = strs.length; // for(int i = 0; i < N; i++) L += strs[i].length() + 1; // int[] indexMap = new int[L]; // // CHANGE // int LOWEST_ASCII = 1000; // // int LOWEST_ASCII = Integer.MAX_VALUE; // int k = 0; // // Find the lowest ASCII value within the strings. // // Also construct the index map to know which original // // string a given suffix belongs to. // for (int i = 0; i < strs.length; i++) { // String str = strs[i]; // for (int j = 0; j < str.length(); j++) { // int asciiVal = str.charAt(j); // if (asciiVal < LOWEST_ASCII) LOWEST_ASCII = asciiVal; // indexMap[k] = i; // k++; // } // // Record that the sentinel belongs to string i // indexMap[k] = i; // k++; // } // final int SHIFT = LOWEST_ASCII + NUM_SENTINELS + 1; // int sentinel = 0; // int[] T = new int[L]; // // CHANGE // k = 0; // // Construct the new text with the shifted values and the sentinels // for(int i = 0; i < N; i++) { // // for(int i = 0, k = 0; i < N; i++) { // String str = strs[i]; // for (int j = 0; j < str.length(); j++) { // T[k] = ((int)str.charAt(j)) + SHIFT; // k++; // } // T[k] = sentinel; // sentinel++; // k++; // } // // CHANGE // String tmp = intArrToString(T); // SuffixArray sa = new SuffixArray(tmp); // // // SuffixArray sa = new SuffixArray(T); // ArrayDeque <Integer> deque = new ArrayDeque<>(); // HashMap <Integer, Integer> windowColorCount = new HashMap<>(); // HashSet <Integer> windowColors = new HashSet<>(); // // Start the sliding window at the number of sentinels because those // // all get sorted first and we want to ignore them // int lo 
= NUM_SENTINELS, hi = NUM_SENTINELS, bestLCSLength = 0; // // Add the first color // int firstColor = indexMap[sa.sa[hi]]; // windowColors.add(new Integer(firstColor)); // windowColorCount.put(new Integer(firstColor), new Integer(1)); // int count = 0; // // Maintain a sliding window between lo and hi // while(hi < L) { // int uniqueColors = windowColors.size(); // // Attempt to update the LCS // if (uniqueColors >= K) { // // CHANGE // Integer deqPeekFirst = deque.peekFirst(); // int deqPeekFirst_int = deqPeekFirst.intValue(); // int windowLCP = sa.lcp[deqPeekFirst_int]; // // int windowLCP = sa.lcp[deque.peekFirst()]; // if (windowLCP > 0 && bestLCSLength < windowLCP) { // bestLCSLength = windowLCP; // lcss.clear(); // } // if (windowLCP > 0 && bestLCSLength == windowLCP) { // // Construct the current LCS within the window interval // int pos = sa.sa[lo]; // char[] lcs = new char[windowLCP]; // for (int i = 0; i < windowLCP; i++) lcs[i] = (char)(T[pos+i] - SHIFT); // // CHANGE // lcss.add(new String(lcs, 0, lcs.length)); // // lcss.add(new String(lcs)); // // If you wish to find the original strings to which this longest // // common substring belongs to the indexes of those strings can be // // found in the windowColors set, so just use those indexes on the 'strs' array // } // // Update the colors in our window // int lastColor = indexMap[sa.sa[lo]]; // // CHANGE // Integer colorCount = windowColorCount.get(new Integer(lastColor)); // // Integer colorCount = windowColorCount.get(lastColor); // int check = colorCount.intValue(); // // CHANGE // boolean removed = false; // if (colorCount.intValue() == 1) { // windowColors.remove(new Integer(lastColor)); // removed = true; // } // // if (colorCount == 1) windowColors.remove(lastColor); // // CHANGE // windowColorCount.put(new Integer(lastColor), new Integer(colorCount.intValue() - 1)); // // windowColorCount.put(lastColor, colorCount - 1); // // CHANGE // if (!deque.isEmpty()) { // // CHANGE // deqPeekFirst = 
deque.peekFirst(); // boolean deqPeekLessThanLo = deqPeekFirst.intValue() <= lo; // // Remove the head if it's outside the new range: [lo+1, hi) // while (!deque.isEmpty() && deqPeekLessThanLo) { // deque.removeFirst(); // deqPeekFirst = deque.peekFirst(); // if (deqPeekFirst != null) { // deqPeekLessThanLo = deqPeekFirst.intValue() <= lo; // } else { // deqPeekLessThanLo = false; // } // } // } // // Decrease the window size // lo++; // // Increase the window size because we don't have enough colors // } else if(hi+1 < L) { // hi++; // int nextColor = indexMap[sa.sa[hi]]; // // CHANGE // Integer nextColor_Int = new Integer(nextColor); // // Update the colors in our window // // CHANGE // windowColors.add(nextColor_Int); // // windowColors.add(nextColor); // // CHANGE // Integer colorCount = windowColorCount.get(nextColor_Int); // // Integer colorCount = windowColorCount.get(nextColor); // // CHANGE // if (colorCount == null) colorCount = new Integer(0); // // if (colorCount == null) colorCount = 0; // // CHANGE // windowColorCount.put(nextColor_Int, new Integer(colorCount.intValue() + 1)); // // windowColorCount.put(nextColor, colorCount + 1); // // CHANGE // if (!deque.isEmpty()) { // // CHANGE // Integer deqPeekLast = deque.peekLast(); // int deqPeekLast_int = deqPeekLast.intValue(); // // CHANGE // // Remove all the worse values in the back of the deque // while(!deque.isEmpty() && sa.lcp[deqPeekLast_int] > sa.lcp[hi-1]) { // // while(!deque.isEmpty() && sa.lcp[deque.peekLast()] > sa.lcp[hi-1]) // deque.removeLast(); // // CHANGE // if (!deque.isEmpty()) { // deqPeekLast = deque.peekLast(); // deqPeekLast_int = deqPeekLast.intValue(); // } // } // } // // CHANGE // deque.addLast(new Integer(hi-1)); // // deque.addLast(hi-1); // } // count++; // } // return lcss; // } // // public void display() { // // System.out.printf("-----i-----SA-----LCP---Suffix\n"); // // for(int i = 0; i < N; i++) { // // int suffixLen = N - sa[i]; // // String suffix = new String(T, 
sa[i], suffixLen); // // System.out.printf("% 7d % 7d % 7d %s\n", i, sa[i],lcp[i], suffix ); // // } // // } // // CHANGE // // // public static void main(String[] args){ // // harness public static void main() { // // // String[] strs = { "GAGL", "RGAG", "TGAGE" }; // // String[] strs = { "AAGAAGC", "AGAAGT", "CGAAGC" }; // // // String[] strs = { "abca", "bcad", "daca" }; // // // String[] strs = { "abca", "bcad", "daca" }; // // // String[] strs = { "AABC", "BCDC", "BCDE", "CDED" }; // // // String[] strs = { "abcdefg", "bcdefgh", "cdefghi" }; // // // String[] strs = { "xxx", "yyy", "zzz" }; // // TreeSet <String> lcss = SuffixArray.lcs(strs, 2); // // // System.out.println(lcss); // // // SuffixArray sa = new SuffixArray("abracadabra"); // // // System.out.println(sa); // // // System.out.println(java.util.Arrays.toString(sa.sa)); // // // System.out.println(java.util.Arrays.toString(sa.lcp)); // // // SuffixArray sa = new SuffixArray("ababcabaa"); // // // sa.display(); // // } }
test/axioms/benchmarks/SuffixArray_bigger/SuffixArray_loops.java
/** * * Generally speaking, suffix arrays are used to do multiple queries * efficiently on one piece of data rather than to do one operation * then move on to another piece of text. * * Good suffix array read: http://www.cs.yale.edu/homes/aspnes/pinewiki/SuffixArrays.html * * @author William Fiset, [email protected] **/ // MODEL SYNTHESIS // clone: 338318 (using testLRS, default params) // lrs: // AXIOM SYNTHESIS // clone: 32605 (using testLRS, default params) // lrs: import java.util.*; class SuffixArray { // Size of the suffix array int N; // T is the text int[] T; // Suffix array. Contains the indexes of sorted suffixes. int[] sa; // Contains Longest Common Prefix (LCP) count between adjacent suffixes. // lcp[i] = longestCommonPrefixLength( suffixes[i], suffixes[i+1] ). // Also, LCP[len-1] = 0 int [] lcp; // CHANGE public int[] clone(int[] arr) { // int l = {| arr.length, N |}; int l = arr.length; int[] arr_cp = new int[l]; // for(int i=??; i<l; i++) { for(int i=0; i<l; i++) { arr_cp[i] = arr[i]; } return arr_cp; } public SuffixArray(String text) { this(toIntArray(text)); } //CHANGE private static String intArrToString(int [] text) { char[] tmp = new char[text.length]; for (int i=0; i<text.length; i++) { tmp[i] = (char) text[i]; } // Extract part of the suffix we need to compare return new String(tmp, 0, text.length); } private static int[] toIntArray(String s) { int[] text = new int[s.length()]; for(int i=0;i<s.length();i++)text[i] = s.charAt(i); return text; } public SuffixArray(int[] text) { // CHANGE // if (text == null) throw new IllegalArgumentException(); // T = text.clone(); T = clone(text); N = text.length; construct(); kasai(); } // Construct a suffix array in O(nlog^2(n)) public void construct() { sa = new int[N]; // Maintain suffix ranks in both a matrix with two rows containing the // current and last rank information as well as some sortable rank objects // CHANGE // int[][] suffixRanks = new int[2][N]; TwoDArray suffixRanks = new TwoDArray(2, N); 
SuffixRankTuple[] ranks = new SuffixRankTuple[N]; // Assign a numerical value to each character in the text for (int i = 0; i < N; i++) { // CHANGE // suffixRanks[0][i] = T[i]; suffixRanks.set(0, i, T[i]); ranks[i] = new SuffixRankTuple(); } // O(logn) for(int pos = 1; pos < N; pos *= 2) { for(int i = 0; i < N; i++) { SuffixRankTuple suffixRank = ranks[i]; suffixRank.firstHalf = suffixRanks.get(0, i); // CHANGE // suffixRank.firstHalf = suffixRanks[0][i]; suffixRank.secondHalf = i+pos < N ? suffixRanks.get(0, i+pos) : -1; // CHANGE // suffixRank.secondHalf = i+pos < N ? suffixRanks[0][i+pos] : -1; suffixRank.originalIndex = i; } // O(nlogn) // CHANGE // java.util.Arrays.sort(ranks); ranks = Arrays.sort(ranks, ranks.length); int newRank = 0; suffixRanks.set(1, ranks[0].originalIndex, 0); // CHANGE // suffixRanks[1][ranks[0].originalIndex] = 0; for (int i = 1; i < N; i++ ) { SuffixRankTuple lastSuffixRank = ranks[i-1]; SuffixRankTuple currSuffixRank = ranks[i]; // If the first half differs from the second half if (currSuffixRank.firstHalf != lastSuffixRank.firstHalf || currSuffixRank.secondHalf != lastSuffixRank.secondHalf) newRank++; suffixRanks.set(1, currSuffixRank.originalIndex, newRank); // CHANGE // suffixRanks[1][currSuffixRank.originalIndex] = newRank; } // Place top row (current row) to be the last row suffixRanks.setRow(0, suffixRanks.getRow(1)); // CHANGE // suffixRanks[0] = suffixRanks[1]; // Optimization to stop early // CHANGE // if (newRank == N-1) break; if (newRank == N-1) pos = N; } // Fill suffix array for (int i = 0; i < N; i++) { sa[i] = ranks[i].originalIndex; ranks[i] = null; } // Cleanup suffixRanks = null; // CHANGE // suffixRanks[0] = suffixRanks[1] = null; suffixRanks = null; ranks = null; } // Constructs the LCP (longest common prefix) array in linear time - O(n) // http://www.mi.fu-berlin.de/wiki/pub/ABI/RnaSeqP4/suffix-array.pdf private void kasai() { lcp = new int[N]; // Compute inverse index values int [] inv = new int[N]; for (int i = 
0; i < N; i++) inv[sa[i]] = i; // Current lcp length int len = 0; for (int i = 0; i < N; i++) { if (inv[i] > 0) { // Get the index of where the suffix below is int k = sa[inv[i]-1]; // Compute lcp length. For most loops this is O(1) while( (i + len < N) && (k + len < N) && T[i+len] == T[k+len] ) len++; lcp[inv[i]-1] = len; if (len > 0) len--; } } } // // Runs on O(mlog(n)) where m is the length of the substring // // and n is the length of the text. // // NOTE: This is the naive implementation. There exists an // // implementation which runs in O(m + log(n)) time // public boolean contains(String substr) { // if (substr == null) return false; // if (substr.equals("")) return true; // String suffix_str; // int lo = 0, hi = N - 1; // int substr_len = substr.length(); // while( lo <= hi ) { // int mid = (lo + hi) / 2; // int suffix_index = sa[mid]; // int suffix_len = N - suffix_index; // // CHANGE // char[] tmp = new char[T.length]; // for (int i=0; i<T.length; i++) { // tmp[i] = (char) T[i]; // } // // Extract part of the suffix we need to compare // if (suffix_len <= substr_len) suffix_str = new String(tmp, suffix_index, suffix_len); // else suffix_str = new String(tmp, suffix_index, substr_len); // // CHANGE // // if (suffix_len <= substr_len) suffix_str = new String(T, suffix_index, suffix_len); // // else suffix_str = new String(T, suffix_index, substr_len); // int cmp = suffix_str.compareTo(substr); // // Found a match // if ( cmp == 0 ) { // // To find the first occurrence linear scan up/down // // from here or keep doing binary search // return true; // // Substring is found above // } else if (cmp < 0) { // lo = mid + 1; // // Substring is found below // } else { // hi = mid - 1; // } // } // return false; // } // generator public void forLoop() { // // for (int i = 0; i < blah; i++) // int t = ??; // int i = ??; // boolean b1 = i < ??; // for (int i = ??; i {| |} blah; i=i+??){ // } // } public TreeSet <String> lrs() { int[] localInts = new int[10]; 
Object[] localObjs = new Object[10]; return lrsGen(localInts, localObjs); } generator public int genInt(int[] localInts, Object[] localObjs, int i) { int local = localInts[0]; int i1 = lcp[??]; int i2 = lcp[i]; int i3 = lcp[local]; int i4 = T[??]; int i5 = T[local]; int i6 = T[i]; int i7 = sa[??]; int i8 = sa[i]; int i9 = sa[local]; int sz = 0; if (??) { TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; sz = lrss.size(); } int p = {| i, i1, i2, i3, i4, i5, i6, i7, i8, i9, local, ??, T.length, N, sz |}; return p; } public void initVars(int[] localInts, int numLocalInts, Object[] localObjs, int numLocalObjs) { // int t = ??; // if (numLocalInts < 10) { // if (??) { // localInts[numLocalInts] = ??; // numLocalInts++; // } // } // if (numLocalObjs < 10 && ??) { // localObjs[numLocalObjs] = new TreeSet<>(); // numLocalObjs++; // } // if (numLocalObjs < 10 && ??) { // localObjs[numLocalObjs] = new Object(); // numLocalObjs++; // } // if (numLocalObjs < 10 && ??) { // localObjs[numLocalObjs] = new String(); // numLocalObjs++; // } localInts[0] = 0; localObjs[0] = new TreeSet<>(); } public TreeSet<String> lrsGen(int[] localInts, Object[] localObjs) { char[] tmp; if (??) { initVars(localInts, 0, localObjs, 0); } if (??) { tmp = new char[T.length]; int g1 = genInt(localInts, localObjs, 0); for (int i=0; {| i == g1 | i < g1 | i <= g1 | i > g1 | i >= g1 |}; i++) { char r = (char) genInt(localInts, localObjs, i); if (??) { tmp[??] = r; } if (??) { tmp[i] = r; } } } if (??) 
{ int g1 = genInt(localInts, localObjs, 0); for (int i=0; {| i == g1 | i < g1 | i <= g1 | i > g1 | i >= g1 |}; i++) { boolean comp1 = genGuard(localInts, localObjs, i, tmp); boolean comp2 = genGuard(localInts, localObjs, i, tmp); if (comp1) { if (comp2) { // genStmt(localInts, localObjs, i, tmp); // TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; // lrss.clear(); genStmts(localInts, localObjs, i, tmp); } genStmts(localInts, localObjs, i, tmp); // genStmt(localInts, localObjs, i, tmp); // genStmt(localInts, localObjs, i, tmp); // genStmt(localInts, localObjs, i, tmp); // TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; // localInts[0] = lcp[i]; // lrss.add(new String(tmp, sa[i], localInts[0])); } } } if (??) { return (TreeSet<String>) localObjs[0]; } return null; } generator public boolean genGuard(int[] localInts, Object[] localObjs, int i, char[] tmp) { int i1 = genInt(localInts, localObjs, i); int i2 = genInt(localInts, localObjs, i); boolean contains = true; if (??) { TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; int index1 = genInt(localInts, localObjs, i); int index2 = genInt(localInts, localObjs, i); contains = lrss.contains(new String(tmp, index1, index2)); } return {| i1 == i2, i1 < i2, i1 <= i2, contains |}; } generator public void genStmt(int[] localInts, Object[] localObjs, int i, char[] tmp) { if (??) { TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; lrss.clear(); } if (??) { int i1 = genInt(localInts, localObjs, i); localInts[0] = i1; } if (??) { TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; int index1 = genInt(localInts, localObjs, i); int index2 = genInt(localInts, localObjs, i); lrss.add(new String(tmp, index1, index2)); } } generator public void genStmts(int[] localInts, Object[] localObjs, int i, char[] tmp) { if (??) { genStmt(localInts, localObjs, i, tmp); } if (??) { genStmts(localInts, localObjs, i, tmp); } } // Finds the LRS(s) (Longest Repeated Substring) that occurs in a string. 
// Traditionally we are only interested in substrings that appear at // least twice, so this method returns an empty set if this is the case. // @return an ordered set of longest repeated substrings public TreeSet <String> lrs2() { int[] localInts = new int[10]; Object[] localObjs = new Object[10]; initVars(localInts, 0, localObjs, 0); // int max_len = 0; // TreeSet <String> lrss = new TreeSet<>(); int max_len = localInts[0]; TreeSet <String> lrss = localObjs[0]; char[] tmp = new char[T.length]; // CHANGE // int g1 = {| T.length, N, max_len, ?? |}; // for (int i=??; i<g1; i=i+??) { for (int i=0; i<T.length; i++) { tmp[i] = (char) T[i]; } // int g2 = {| T.length, N, max_len, ?? |}; // for (int i = ??; i < g2; i=i+??) { for (int i = 0; i < N; i++) { // if (lcp[i] > ?? && lcp[i] >= max_len) { if (lcp[i] > 0 && lcp[i] >= max_len) { // We found a longer LRS if ( lcp[i] > max_len ) { lrss.clear(); } // Append substring to the list and update max max_len = lcp[i]; // CHANGE lrss.add( new String(tmp, sa[i], max_len) ); // lrss.add( new String(T, sa[i], max_len) ); } } return lrss; } // // /** // // * Finds the Longest Common Substring (LCS) between a group of strings. // // * The current implementation takes O(nlog(n)) bounded by the suffix array construction. // // * @param strs - The strings you wish to find the longest common substring between // // * @param K - The minimum number of strings to find the LCS between. K must be at least 2. 
// // **/ // public static TreeSet<String> lcs(String [] strs, int K) { // // CHANGE // // if (K <= 1) throw new IllegalArgumentException("K must be greater than or equal to 2!"); // if (K <= 1) { // return null; // } // TreeSet<String> lcss = new TreeSet(); // if (strs == null || strs.length <= 1) return lcss; // // L is the concatenated length of all the strings and the sentinels // int L = 0; // final int NUM_SENTINELS = strs.length, N = strs.length; // for(int i = 0; i < N; i++) L += strs[i].length() + 1; // int[] indexMap = new int[L]; // // CHANGE // int LOWEST_ASCII = 1000; // // int LOWEST_ASCII = Integer.MAX_VALUE; // int k = 0; // // Find the lowest ASCII value within the strings. // // Also construct the index map to know which original // // string a given suffix belongs to. // for (int i = 0; i < strs.length; i++) { // String str = strs[i]; // for (int j = 0; j < str.length(); j++) { // int asciiVal = str.charAt(j); // if (asciiVal < LOWEST_ASCII) LOWEST_ASCII = asciiVal; // indexMap[k] = i; // k++; // } // // Record that the sentinel belongs to string i // indexMap[k] = i; // k++; // } // final int SHIFT = LOWEST_ASCII + NUM_SENTINELS + 1; // int sentinel = 0; // int[] T = new int[L]; // // CHANGE // k = 0; // // Construct the new text with the shifted values and the sentinels // for(int i = 0; i < N; i++) { // // for(int i = 0, k = 0; i < N; i++) { // String str = strs[i]; // for (int j = 0; j < str.length(); j++) { // T[k] = ((int)str.charAt(j)) + SHIFT; // k++; // } // T[k] = sentinel; // sentinel++; // k++; // } // // CHANGE // String tmp = intArrToString(T); // SuffixArray sa = new SuffixArray(tmp); // // // SuffixArray sa = new SuffixArray(T); // ArrayDeque <Integer> deque = new ArrayDeque<>(); // HashMap <Integer, Integer> windowColorCount = new HashMap<>(); // HashSet <Integer> windowColors = new HashSet<>(); // // Start the sliding window at the number of sentinels because those // // all get sorted first and we want to ignore them // int lo 
= NUM_SENTINELS, hi = NUM_SENTINELS, bestLCSLength = 0; // // Add the first color // int firstColor = indexMap[sa.sa[hi]]; // windowColors.add(new Integer(firstColor)); // windowColorCount.put(new Integer(firstColor), new Integer(1)); // int count = 0; // // Maintain a sliding window between lo and hi // while(hi < L) { // int uniqueColors = windowColors.size(); // // Attempt to update the LCS // if (uniqueColors >= K) { // // CHANGE // Integer deqPeekFirst = deque.peekFirst(); // int deqPeekFirst_int = deqPeekFirst.intValue(); // int windowLCP = sa.lcp[deqPeekFirst_int]; // // int windowLCP = sa.lcp[deque.peekFirst()]; // if (windowLCP > 0 && bestLCSLength < windowLCP) { // bestLCSLength = windowLCP; // lcss.clear(); // } // if (windowLCP > 0 && bestLCSLength == windowLCP) { // // Construct the current LCS within the window interval // int pos = sa.sa[lo]; // char[] lcs = new char[windowLCP]; // for (int i = 0; i < windowLCP; i++) lcs[i] = (char)(T[pos+i] - SHIFT); // // CHANGE // lcss.add(new String(lcs, 0, lcs.length)); // // lcss.add(new String(lcs)); // // If you wish to find the original strings to which this longest // // common substring belongs to the indexes of those strings can be // // found in the windowColors set, so just use those indexes on the 'strs' array // } // // Update the colors in our window // int lastColor = indexMap[sa.sa[lo]]; // // CHANGE // Integer colorCount = windowColorCount.get(new Integer(lastColor)); // // Integer colorCount = windowColorCount.get(lastColor); // int check = colorCount.intValue(); // // CHANGE // boolean removed = false; // if (colorCount.intValue() == 1) { // windowColors.remove(new Integer(lastColor)); // removed = true; // } // // if (colorCount == 1) windowColors.remove(lastColor); // // CHANGE // windowColorCount.put(new Integer(lastColor), new Integer(colorCount.intValue() - 1)); // // windowColorCount.put(lastColor, colorCount - 1); // // CHANGE // if (!deque.isEmpty()) { // // CHANGE // deqPeekFirst = 
deque.peekFirst(); // boolean deqPeekLessThanLo = deqPeekFirst.intValue() <= lo; // // Remove the head if it's outside the new range: [lo+1, hi) // while (!deque.isEmpty() && deqPeekLessThanLo) { // deque.removeFirst(); // deqPeekFirst = deque.peekFirst(); // if (deqPeekFirst != null) { // deqPeekLessThanLo = deqPeekFirst.intValue() <= lo; // } else { // deqPeekLessThanLo = false; // } // } // } // // Decrease the window size // lo++; // // Increase the window size because we don't have enough colors // } else if(hi+1 < L) { // hi++; // int nextColor = indexMap[sa.sa[hi]]; // // CHANGE // Integer nextColor_Int = new Integer(nextColor); // // Update the colors in our window // // CHANGE // windowColors.add(nextColor_Int); // // windowColors.add(nextColor); // // CHANGE // Integer colorCount = windowColorCount.get(nextColor_Int); // // Integer colorCount = windowColorCount.get(nextColor); // // CHANGE // if (colorCount == null) colorCount = new Integer(0); // // if (colorCount == null) colorCount = 0; // // CHANGE // windowColorCount.put(nextColor_Int, new Integer(colorCount.intValue() + 1)); // // windowColorCount.put(nextColor, colorCount + 1); // // CHANGE // if (!deque.isEmpty()) { // // CHANGE // Integer deqPeekLast = deque.peekLast(); // int deqPeekLast_int = deqPeekLast.intValue(); // // CHANGE // // Remove all the worse values in the back of the deque // while(!deque.isEmpty() && sa.lcp[deqPeekLast_int] > sa.lcp[hi-1]) { // // while(!deque.isEmpty() && sa.lcp[deque.peekLast()] > sa.lcp[hi-1]) // deque.removeLast(); // // CHANGE // if (!deque.isEmpty()) { // deqPeekLast = deque.peekLast(); // deqPeekLast_int = deqPeekLast.intValue(); // } // } // } // // CHANGE // deque.addLast(new Integer(hi-1)); // // deque.addLast(hi-1); // } // count++; // } // return lcss; // } // // public void display() { // // System.out.printf("-----i-----SA-----LCP---Suffix\n"); // // for(int i = 0; i < N; i++) { // // int suffixLen = N - sa[i]; // // String suffix = new String(T, 
sa[i], suffixLen); // // System.out.printf("% 7d % 7d % 7d %s\n", i, sa[i],lcp[i], suffix ); // // } // // } // // CHANGE // // // public static void main(String[] args){ // // harness public static void main() { // // // String[] strs = { "GAGL", "RGAG", "TGAGE" }; // // String[] strs = { "AAGAAGC", "AGAAGT", "CGAAGC" }; // // // String[] strs = { "abca", "bcad", "daca" }; // // // String[] strs = { "abca", "bcad", "daca" }; // // // String[] strs = { "AABC", "BCDC", "BCDE", "CDED" }; // // // String[] strs = { "abcdefg", "bcdefgh", "cdefghi" }; // // // String[] strs = { "xxx", "yyy", "zzz" }; // // TreeSet <String> lcss = SuffixArray.lcs(strs, 2); // // // System.out.println(lcss); // // // SuffixArray sa = new SuffixArray("abracadabra"); // // // System.out.println(sa); // // // System.out.println(java.util.Arrays.toString(sa.sa)); // // // System.out.println(java.util.Arrays.toString(sa.lcp)); // // // SuffixArray sa = new SuffixArray("ababcabaa"); // // // sa.display(); // // } }
added some to SuffixArray test
test/axioms/benchmarks/SuffixArray_bigger/SuffixArray_loops.java
added some to SuffixArray test
<ide><path>est/axioms/benchmarks/SuffixArray_bigger/SuffixArray_loops.java <ide> <ide> if (comp1) { <ide> if (comp2) { <del> // genStmt(localInts, localObjs, i, tmp); <ide> // TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; <ide> // lrss.clear(); <ide> genStmts(localInts, localObjs, i, tmp); <ide> } <ide> genStmts(localInts, localObjs, i, tmp); <del> // genStmt(localInts, localObjs, i, tmp); <del> // genStmt(localInts, localObjs, i, tmp); <del> // genStmt(localInts, localObjs, i, tmp); <del> // TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; <add> // TreeSet<String> lrss = (TreeSet<String>) localObjs[0]; <ide> // localInts[0] = lcp[i]; <ide> // lrss.add(new String(tmp, sa[i], localInts[0])); <ide> }
Java
apache-2.0
6cc4d07a3ac791d971d9e64d843e95b92f3b1511
0
UniTime/unitime,rafati/unitime,zuzanamullerova/unitime,UniTime/unitime,nikeshmhr/unitime,zuzanamullerova/unitime,sktoo/timetabling-system-,nikeshmhr/unitime,zuzanamullerova/unitime,maciej-zygmunt/unitime,rafati/unitime,maciej-zygmunt/unitime,nikeshmhr/unitime,sktoo/timetabling-system-,sktoo/timetabling-system-,UniTime/unitime,rafati/unitime,maciej-zygmunt/unitime
/* * UniTime 3.1 (University Timetabling Application) * Copyright (C) 2008, UniTime.org, and individual contributors * as indicated by the @authors tag. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ package org.unitime.timetable.solver.exam.ui; import java.io.Serializable; import java.util.Collection; import java.util.Comparator; import java.util.Enumeration; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.Vector; import java.util.Map.Entry; import org.unitime.timetable.model.ClassEvent; import org.unitime.timetable.model.ClassInstructor; import org.unitime.timetable.model.Class_; import org.unitime.timetable.model.DepartmentalInstructor; import org.unitime.timetable.model.DistributionObject; import org.unitime.timetable.model.DistributionPref; import org.unitime.timetable.model.ExamConflict; import org.unitime.timetable.model.ExamPeriod; import org.unitime.timetable.model.Location; import org.unitime.timetable.model.Meeting; import org.unitime.timetable.model.PreferenceLevel; import org.unitime.timetable.model.SolverParameterDef; import org.unitime.timetable.model.Student; import org.unitime.timetable.model.dao.ClassEventDAO; import org.unitime.timetable.solver.exam.ExamModel; import 
org.unitime.timetable.solver.exam.ExamResourceUnavailability; import net.sf.cpsolver.exam.model.Exam; import net.sf.cpsolver.exam.model.ExamDistributionConstraint; import net.sf.cpsolver.exam.model.ExamInstructor; import net.sf.cpsolver.exam.model.ExamPlacement; import net.sf.cpsolver.exam.model.ExamStudent; /** * @author Tomas Muller */ public class ExamAssignmentInfo extends ExamAssignment implements Serializable { private TreeSet<DirectConflict> iDirects = new TreeSet(); private TreeSet<BackToBackConflict> iBackToBacks = new TreeSet(); private TreeSet<MoreThanTwoADayConflict> iMoreThanTwoADays = new TreeSet(); private TreeSet<DirectConflict> iInstructorDirects = new TreeSet(); private TreeSet<BackToBackConflict> iInstructorBackToBacks = new TreeSet(); private TreeSet<MoreThanTwoADayConflict> iInstructorMoreThanTwoADays = new TreeSet(); private TreeSet<DistributionConflict> iDistributions = new TreeSet(); public ExamAssignmentInfo(ExamPlacement placement) { this((Exam)placement.variable(),placement); } public ExamAssignmentInfo(Exam exam, ExamPlacement placement) { super(exam, placement); if (placement!=null) { ExamModel model = (ExamModel)exam.getModel(); Hashtable<Exam,DirectConflict> directs = new Hashtable(); for (Enumeration e=exam.getStudents().elements();e.hasMoreElements();) { ExamStudent student = (ExamStudent)e.nextElement(); for (Iterator i=student.getExams(placement.getPeriod()).iterator();i.hasNext();) { Exam other = (Exam)i.next(); if (other.equals(exam)) continue; DirectConflict dc = directs.get(other); if (dc==null) { dc = new DirectConflict(new ExamAssignment((ExamPlacement)other.getAssignment())); directs.put(other, dc); } else dc.incNrStudents(); dc.getStudents().add(student.getId()); } } iDirects.addAll(directs.values()); int btbDist = model.getBackToBackDistance(); Hashtable<Exam,BackToBackConflict> backToBacks = new Hashtable(); for (Enumeration e=exam.getStudents().elements();e.hasMoreElements();) { ExamStudent student = 
(ExamStudent)e.nextElement(); if (placement.getPeriod().prev()!=null) { if (model.isDayBreakBackToBack() || placement.getPeriod().prev().getDay()==placement.getPeriod().getDay()) { Set exams = student.getExams(placement.getPeriod().prev()); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); double distance = placement.getDistance((ExamPlacement)other.getAssignment()); BackToBackConflict btb = backToBacks.get(other); if (btb==null) { btb = new BackToBackConflict(new ExamAssignment((ExamPlacement)other.getAssignment()), (btbDist<0?false:distance>btbDist), distance); backToBacks.put(other, btb); } else btb.incNrStudents(); btb.getStudents().add(student.getId()); } } } if (placement.getPeriod().next()!=null) { if (model.isDayBreakBackToBack() || placement.getPeriod().next().getDay()==placement.getPeriod().getDay()) { Set exams = student.getExams(placement.getPeriod().next()); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); BackToBackConflict btb = backToBacks.get(other); double distance = placement.getDistance((ExamPlacement)other.getAssignment()); if (btb==null) { btb = new BackToBackConflict(new ExamAssignment((ExamPlacement)other.getAssignment()), (btbDist<0?false:distance>btbDist), distance); backToBacks.put(other, btb); } else btb.incNrStudents(); btb.getStudents().add(student.getId()); } } } } iBackToBacks.addAll(backToBacks.values()); Hashtable<String,MoreThanTwoADayConflict> m2ds = new Hashtable(); for (Enumeration e=exam.getStudents().elements();e.hasMoreElements();) { ExamStudent student = (ExamStudent)e.nextElement(); Set exams = student.getExamsADay(placement.getPeriod()); int nrExams = exams.size() + (exams.contains(exam)?0:1); if (nrExams<=2) continue; TreeSet examIds = new TreeSet(); TreeSet otherExams = new TreeSet(); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); if (other.equals(exam)) continue; examIds.add(other.getId()); otherExams.add(new 
ExamAssignment((ExamPlacement)other.getAssignment())); } MoreThanTwoADayConflict m2d = m2ds.get(examIds.toString()); if (m2d==null) { m2d = new MoreThanTwoADayConflict(otherExams); m2ds.put(examIds.toString(), m2d); } else m2d.incNrStudents(); m2d.getStudents().add(student.getId()); } iMoreThanTwoADays.addAll(m2ds.values()); Hashtable<Exam,DirectConflict> idirects = new Hashtable(); for (Enumeration e=exam.getInstructors().elements();e.hasMoreElements();) { ExamInstructor instructor = (ExamInstructor)e.nextElement(); for (Iterator i=instructor.getExams(placement.getPeriod()).iterator();i.hasNext();) { Exam other = (Exam)i.next(); if (other.equals(exam)) continue; DirectConflict dc = idirects.get(other); if (dc==null) { dc = new DirectConflict(new ExamAssignment((ExamPlacement)other.getAssignment())); idirects.put(other, dc); } else dc.incNrStudents(); dc.getStudents().add(instructor.getId()); } } iInstructorDirects.addAll(idirects.values()); Hashtable<Exam,BackToBackConflict> ibackToBacks = new Hashtable(); for (Enumeration e=exam.getInstructors().elements();e.hasMoreElements();) { ExamInstructor instructor = (ExamInstructor)e.nextElement(); if (placement.getPeriod().prev()!=null) { if (model.isDayBreakBackToBack() || placement.getPeriod().prev().getDay()==placement.getPeriod().getDay()) { Set exams = instructor.getExams(placement.getPeriod().prev()); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); double distance = placement.getDistance((ExamPlacement)other.getAssignment()); BackToBackConflict btb = ibackToBacks.get(other); if (btb==null) { btb = new BackToBackConflict(new ExamAssignment((ExamPlacement)other.getAssignment()), (btbDist<0?false:distance>btbDist), distance); ibackToBacks.put(other, btb); } else btb.incNrStudents(); btb.getStudents().add(instructor.getId()); } } } if (placement.getPeriod().next()!=null) { if (model.isDayBreakBackToBack() || placement.getPeriod().next().getDay()==placement.getPeriod().getDay()) { Set exams 
= instructor.getExams(placement.getPeriod().next()); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); BackToBackConflict btb = ibackToBacks.get(other); double distance = placement.getDistance((ExamPlacement)other.getAssignment()); if (btb==null) { btb = new BackToBackConflict(new ExamAssignment((ExamPlacement)other.getAssignment()), (btbDist<0?false:distance>btbDist), distance); ibackToBacks.put(other, btb); } else btb.incNrStudents(); btb.getStudents().add(instructor.getId()); } } } } iInstructorBackToBacks.addAll(ibackToBacks.values()); Hashtable<String,MoreThanTwoADayConflict> im2ds = new Hashtable(); for (Enumeration e=exam.getInstructors().elements();e.hasMoreElements();) { ExamInstructor instructor = (ExamInstructor)e.nextElement(); Set exams = instructor.getExamsADay(placement.getPeriod()); int nrExams = exams.size() + (exams.contains(exam)?0:1); if (nrExams<=2) continue; TreeSet examIds = new TreeSet(); TreeSet otherExams = new TreeSet(); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); if (other.equals(exam)) continue; examIds.add(other.getId()); otherExams.add(new ExamAssignment((ExamPlacement)other.getAssignment())); } MoreThanTwoADayConflict m2d = im2ds.get(examIds.toString()); if (m2d==null) { m2d = new MoreThanTwoADayConflict(otherExams); im2ds.put(examIds.toString(), m2d); } else m2d.incNrStudents(); m2d.getStudents().add(instructor.getId()); } iInstructorMoreThanTwoADays.addAll(im2ds.values()); computeUnavailablility(exam, model.getUnavailabilities(placement.getPeriod())); for (Enumeration e=exam.getDistributionConstraints().elements();e.hasMoreElements();) { ExamDistributionConstraint dc = (ExamDistributionConstraint)e.nextElement(); if (dc.isHard()) { if (dc.inConflict(placement)) iDistributions.add(new DistributionConflict(dc,exam)); } else { if (!dc.isSatisfied(placement)) iDistributions.add(new DistributionConflict(dc,exam)); } } } } public 
ExamAssignmentInfo(org.unitime.timetable.model.Exam exam) { super(exam); if (exam.getConflicts()!=null && !exam.getConflicts().isEmpty()) { for (Iterator i=exam.getConflicts().iterator();i.hasNext();) { ExamConflict conf = (ExamConflict)i.next(); if (conf.isDirectConflict()) { ExamAssignment other = null; for (Iterator j=conf.getExams().iterator();j.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)j.next(); if (x.equals(exam)) continue; if (x.getAssignedPeriod()!=null) other = new ExamAssignment(x); } if (conf.getNrStudents()>0) { iDirects.add(new DirectConflict(other, conf, true)); iNrDirectConflicts += conf.getNrStudents(); } if (conf.getNrInstructors()>0) { iInstructorDirects.add(new DirectConflict(other, conf, false)); iNrInstructorDirectConflicts += conf.getNrInstructors(); } } else if (conf.isBackToBackConflict()) { ExamAssignment other = null; for (Iterator j=conf.getExams().iterator();j.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)j.next(); if (x.equals(exam)) continue; if (x.getAssignedPeriod()!=null) other = new ExamAssignment(x); } if (other==null) continue; if (conf.getNrStudents()>0) { iBackToBacks.add(new BackToBackConflict(other, conf, true)); iNrBackToBackConflicts += conf.getNrStudents(); if (conf.isDistanceBackToBackConflict()) iNrDistanceBackToBackConflicts += conf.getNrStudents(); } if (conf.getNrInstructors()>0) { iInstructorBackToBacks.add(new BackToBackConflict(other, conf, false)); iNrInstructorBackToBackConflicts += conf.getNrInstructors(); if (conf.isDistanceBackToBackConflict()) iNrInstructorDistanceBackToBackConflicts += conf.getNrInstructors(); } } else if (conf.isMoreThanTwoADayConflict()) { TreeSet other = new TreeSet(); for (Iterator j=conf.getExams().iterator();j.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)j.next(); if (x.equals(exam)) continue; if (x.getAssignedPeriod()!=null) other.add(new ExamAssignment(x)); } if 
(other.size()<2) continue; if (conf.getNrStudents()>0) { iMoreThanTwoADays.add(new MoreThanTwoADayConflict(other, conf, true)); iNrMoreThanTwoADayConflicts += conf.getNrStudents(); } if (conf.getNrInstructors()>0) { iInstructorMoreThanTwoADays.add(new MoreThanTwoADayConflict(other, conf, false)); iNrInstructorMoreThanTwoADayConflicts += conf.getNrInstructors(); } } } } for (Iterator i=exam.getDistributionObjects().iterator();i.hasNext();) { DistributionObject dObj = (DistributionObject)i.next(); DistributionPref pref = dObj.getDistributionPref(); if (!check(pref, exam, getPeriod(), getRooms(), null)) iDistributions.add(new DistributionConflict(pref, exam)); } if (org.unitime.timetable.model.Exam.sExamTypeMidterm==exam.getExamType() && exam.getAssignedPeriod()!=null) { computeUnavailablility(exam, exam.getAssignedPeriod().getUniqueId()); for (Iterator i=exam.getInstructors().iterator();i.hasNext();) computeUnavailablility((DepartmentalInstructor)i.next(), exam.getAssignedPeriod()); } } private void computeUnavailablility(Exam exam, Vector<ExamResourceUnavailability> unavailabilities) { if (unavailabilities==null || unavailabilities.isEmpty()) return; for (ExamResourceUnavailability unavailability : unavailabilities) { Vector<Long> commonStudents = new Vector(); for (Enumeration e=exam.getStudents().elements();e.hasMoreElements();) { ExamStudent student = (ExamStudent)e.nextElement(); if (unavailability.getStudentIds().contains(student.getId())) commonStudents.add(student.getId()); } if (!commonStudents.isEmpty()) iDirects.add(new DirectConflict(unavailability, commonStudents)); Vector<Long> commonInstructors = new Vector(); for (Enumeration e=exam.getInstructors().elements();e.hasMoreElements();) { ExamInstructor instructor = (ExamInstructor)e.nextElement(); if (unavailability.getInstructorIds().contains(instructor.getId())) commonInstructors.add(instructor.getId()); } if (!commonInstructors.isEmpty()) iInstructorDirects.add(new DirectConflict(unavailability, 
commonInstructors)); } } /* private void computeUnavailablility(Hashtable<Assignment, Set<Long>> studentAssignments, ExamPeriod period) { for (Map.Entry<Assignment, Set<Long>> entry : studentAssignments.entrySet()) { if (!period.overlap(entry.getKey())) continue; iDirects.add(new DirectConflict(entry.getKey(), entry.getValue())); } } */ private void computeUnavailablility(org.unitime.timetable.model.Exam exam, Long periodId) { meetings: for (Map.Entry<Meeting, Set<Long>> entry : exam.getOverlappingStudentMeetings(periodId).entrySet()) { for (Iterator i=iDirects.iterator();i.hasNext();) { DirectConflict dc = (DirectConflict)i.next(); if (entry.getKey().getEvent().getUniqueId().equals(dc.getOtherEventId())) { dc.addMeeting(entry.getKey()); continue meetings; } } iDirects.add(new DirectConflict(entry.getKey(), entry.getValue())); } } private void computeUnavailablility(DepartmentalInstructor instructor, ExamPeriod period) { for (Iterator j=instructor.getClasses().iterator();j.hasNext();) { ClassInstructor ci = (ClassInstructor)j.next(); if (!ci.isLead()) continue; meetings: for (Iterator k=period.findOverlappingClassMeetings(ci.getClassInstructing().getUniqueId()).iterator();k.hasNext();) { Meeting meeting = (Meeting)k.next(); for (Iterator i=iInstructorDirects.iterator();i.hasNext();) { DirectConflict dc = (DirectConflict)i.next(); if (meeting.getEvent().getUniqueId().equals(dc.getOtherEventId())) { dc.incNrStudents(); dc.getStudents().add(instructor.getUniqueId()); dc.addMeeting(meeting); continue meetings; } } DirectConflict dc = new DirectConflict(meeting); dc.getStudents().add(instructor.getUniqueId()); iInstructorDirects.add(dc); } } } public boolean check(DistributionPref pref, org.unitime.timetable.model.Exam exam, ExamPeriod assignedPeriod, Collection<ExamRoomInfo> assignedRooms, Hashtable<Long,ExamAssignment> table) { if (PreferenceLevel.sNeutral.equals(pref.getPrefLevel().getPrefProlog())) return true; boolean positive = 
PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog()) || PreferenceLevel.sStronglyPreferred.equals(pref.getPrefLevel().getPrefProlog()) || PreferenceLevel.sPreferred.equals(pref.getPrefLevel().getPrefProlog()); if ("EX_SAME_PER".equals(pref.getDistributionType().getReference())) { if (positive) { //same period ExamPeriod period = null; for (Iterator i=pref.getDistributionObjects().iterator();i.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)((DistributionObject)i.next()).getPrefGroup(); ExamPeriod p = (x.equals(exam)?assignedPeriod:getAssignedPeriod(x,table)); if (p==null) continue; if (period==null) period = p; else if (!period.equals(p)) return false; } return true; } else { //different period HashSet periods = new HashSet(); for (Iterator i=pref.getDistributionObjects().iterator();i.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)((DistributionObject)i.next()).getPrefGroup(); ExamPeriod p = (x.equals(exam)?assignedPeriod:getAssignedPeriod(x,table)); if (p==null) continue; if (!periods.add(p)) return false; } return true; } } else if ("EX_PRECEDENCE".equals(pref.getDistributionType().getReference())) { TreeSet distObjects = new TreeSet( positive?new Comparator<DistributionObject>() { public int compare(DistributionObject d1, DistributionObject d2) { return d1.getSequenceNumber().compareTo(d2.getSequenceNumber()); } }:new Comparator<DistributionObject>() { public int compare(DistributionObject d1, DistributionObject d2) { return d2.getSequenceNumber().compareTo(d1.getSequenceNumber()); } }); distObjects.addAll(pref.getDistributionObjects()); ExamPeriod prev = null; for (Iterator i=distObjects.iterator();i.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)((DistributionObject)i.next()).getPrefGroup(); ExamPeriod p = (x.equals(exam)?assignedPeriod:getAssignedPeriod(x,table)); if (p==null) continue; if (prev!=null && prev.compareTo(p)>=0) 
return false; prev = p; } return true; } else if ("EX_SAME_ROOM".equals(pref.getDistributionType().getReference())) { if (positive) { //same room Collection<ExamRoomInfo> rooms = null; for (Iterator i=pref.getDistributionObjects().iterator();i.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)((DistributionObject)i.next()).getPrefGroup(); Collection<ExamRoomInfo> r = (x.equals(exam)?assignedRooms:getAssignedRooms(x, table)); if (r==null) continue; if (rooms==null) rooms = r; else if (!rooms.containsAll(r) && !r.containsAll(rooms)) return false; } return true; } else { //different room Collection<ExamRoomInfo> allRooms = new HashSet(); for (Iterator i=pref.getDistributionObjects().iterator();i.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)((DistributionObject)i.next()).getPrefGroup(); Collection<ExamRoomInfo> r = (x.equals(exam)?assignedRooms:getAssignedRooms(x, table)); if (r==null) continue; for (ExamRoomInfo room : r) { if (!allRooms.add(room)) return false; } } return true; } } return false; } public static ExamPeriod getAssignedPeriod(org.unitime.timetable.model.Exam exam, Hashtable<Long, ExamAssignment> table) { ExamAssignment assignment = (table==null?null:table.get(exam.getUniqueId())); return (assignment==null?exam.getAssignedPeriod():assignment.getPeriod()); } public static TreeSet<ExamRoomInfo> getAssignedRooms(org.unitime.timetable.model.Exam exam, Hashtable<Long, ExamAssignment> table) { ExamAssignment assignment = (table==null?null:table.get(exam.getUniqueId())); if (assignment!=null) return assignment.getRooms(); TreeSet<ExamRoomInfo> rooms = new TreeSet(); for (Iterator i=exam.getAssignedRooms().iterator();i.hasNext();) { Location location = (Location)i.next(); rooms.add(new ExamRoomInfo(location,0)); } return rooms; } public static ExamAssignment getAssignment(org.unitime.timetable.model.Exam exam, Hashtable<Long, ExamAssignment> table) { ExamAssignment assignment = 
(table==null?null:table.get(exam.getUniqueId())); return (assignment==null?new ExamAssignment(exam):assignment); } public ExamAssignmentInfo(org.unitime.timetable.model.Exam exam, ExamPeriod period, Collection<ExamRoomInfo> rooms) throws Exception { this(exam, period, rooms, exam.getStudentExams(), null); } public ExamAssignmentInfo(org.unitime.timetable.model.Exam exam, ExamPeriod period, Collection<ExamRoomInfo> rooms, Hashtable<Long, ExamAssignment> table) throws Exception { this(exam, period, rooms, exam.getStudentExams(), table); } public ExamAssignmentInfo(org.unitime.timetable.model.Exam exam, ExamPeriod period, Collection<ExamRoomInfo> rooms, Hashtable<Long, Set<org.unitime.timetable.model.Exam>> examStudents, Hashtable<Long, ExamAssignment> table) throws Exception { super(exam, period, rooms); generateConflicts(exam, examStudents, table); } public ExamAssignmentInfo(org.unitime.timetable.model.Exam exam, Hashtable<Long, ExamAssignment> table) { super(exam); generateConflicts(exam, exam.getStudentExams(), table); } public void generateConflicts(org.unitime.timetable.model.Exam exam, Hashtable<Long, Set<org.unitime.timetable.model.Exam>> examStudents, Hashtable<Long, ExamAssignment> table) { if (getPeriod()==null) return; int btbDist = -1; boolean btbDayBreak = false; SolverParameterDef btbDistDef = SolverParameterDef.findByName("Exams.BackToBackDistance"); if (btbDistDef!=null && btbDistDef.getDefault()!=null) btbDist = Integer.parseInt(btbDistDef.getDefault()); SolverParameterDef btbDayBreakDef = SolverParameterDef.findByName("Exams.IsDayBreakBackToBack"); if (btbDayBreakDef!=null && btbDayBreakDef.getDefault()!=null) btbDayBreak = "true".equals(btbDayBreakDef.getDefault()); Hashtable<org.unitime.timetable.model.Exam,DirectConflict> directs = new Hashtable(); Hashtable<org.unitime.timetable.model.Exam,BackToBackConflict> backToBacks = new Hashtable(); Hashtable<String,MoreThanTwoADayConflict> m2ds = new Hashtable(); for 
(Entry<Long,Set<org.unitime.timetable.model.Exam>> studentExams : examStudents.entrySet()) { TreeSet sameDateExams = new TreeSet(); for (org.unitime.timetable.model.Exam other : studentExams.getValue()) { if (other.equals(getExam())) continue; ExamPeriod otherPeriod = getAssignedPeriod(other, table); if (otherPeriod==null) continue; if (getPeriod().equals(otherPeriod)) { //direct conflict DirectConflict dc = directs.get(other); if (dc==null) { dc = new DirectConflict(getAssignment(other, table)); directs.put(other, dc); } else dc.incNrStudents(); dc.getStudents().add(studentExams.getKey()); iNrDirectConflicts++; } else if (getPeriod().isBackToBack(otherPeriod,btbDayBreak)) { BackToBackConflict btb = backToBacks.get(other); double distance = Location.getDistance(getRooms(), getAssignedRooms(other, table)); if (btb==null) { btb = new BackToBackConflict(getAssignment(other, table), (btbDist<0?false:distance>btbDist), distance); backToBacks.put(other, btb); } else btb.incNrStudents(); btb.getStudents().add(studentExams.getKey()); iNrBackToBackConflicts++; if (btb.isDistance()) iNrDistanceBackToBackConflicts++; } if (getPeriod().getDateOffset().equals(otherPeriod.getDateOffset())) sameDateExams.add(other); } if (sameDateExams.size()>=2) { TreeSet examIds = new TreeSet(); TreeSet otherExams = new TreeSet(); for (Iterator j=sameDateExams.iterator();j.hasNext();) { org.unitime.timetable.model.Exam other = (org.unitime.timetable.model.Exam)j.next(); examIds.add(other.getUniqueId()); otherExams.add(getAssignment(other, table)); } MoreThanTwoADayConflict m2d = m2ds.get(examIds.toString()); if (m2d==null) { m2d = new MoreThanTwoADayConflict(otherExams); m2ds.put(examIds.toString(), m2d); } else m2d.incNrStudents(); iNrMoreThanTwoADayConflicts++; m2d.getStudents().add(studentExams.getKey()); } } iDirects.addAll(directs.values()); iBackToBacks.addAll(backToBacks.values()); iMoreThanTwoADays.addAll(m2ds.values()); if 
(org.unitime.timetable.model.Exam.sExamTypeMidterm==getExamType()) computeUnavailablility(exam,getPeriodId());
        // Instructor conflicts: same structure as the student conflicts above, but
        // iterating over this exam's instructors; the conflicts' "students" vectors
        // hold instructor unique ids here.
        Hashtable<org.unitime.timetable.model.Exam,DirectConflict> idirects = new Hashtable();
        Hashtable<org.unitime.timetable.model.Exam,BackToBackConflict> ibackToBacks = new Hashtable();
        Hashtable<String,MoreThanTwoADayConflict> im2ds = new Hashtable();
        for (Iterator i=getExam().getInstructors().iterator();i.hasNext();) {
            DepartmentalInstructor instructor = (DepartmentalInstructor)i.next();
            TreeSet sameDateExams = new TreeSet();
            for (Iterator j=instructor.getExams(getExam().getExamType()).iterator();j.hasNext();) {
                org.unitime.timetable.model.Exam other = (org.unitime.timetable.model.Exam)j.next();
                if (other.equals(getExam())) continue;
                ExamPeriod otherPeriod = getAssignedPeriod(other, table);
                if (otherPeriod==null) continue;
                if (getPeriod().equals(otherPeriod)) { //direct conflict
                    DirectConflict dc = idirects.get(other);
                    if (dc==null) {
                        dc = new DirectConflict(getAssignment(other, table));
                        idirects.put(other, dc);
                    } else dc.incNrStudents();
                    iNrInstructorDirectConflicts++;
                    dc.getStudents().add(instructor.getUniqueId());
                } else if (getPeriod().isBackToBack(otherPeriod,btbDayBreak)) {
                    BackToBackConflict btb = ibackToBacks.get(other);
                    double distance = Location.getDistance(getRooms(), getAssignedRooms(other, table));
                    if (btb==null) {
                        btb = new BackToBackConflict(getAssignment(other, table), (btbDist<0?false:distance>btbDist), distance);
                        ibackToBacks.put(other, btb);
                    } else btb.incNrStudents();
                    iNrInstructorBackToBackConflicts++;
                    if (btb.isDistance()) iNrInstructorDistanceBackToBackConflicts++;
                    btb.getStudents().add(instructor.getUniqueId());
                }
                if (getPeriod().getDateOffset().equals(otherPeriod.getDateOffset())) sameDateExams.add(other);
            }
            if (org.unitime.timetable.model.Exam.sExamTypeMidterm==getExam().getExamType()) computeUnavailablility(instructor, getPeriod());
            if (sameDateExams.size()>=2) {
                TreeSet examIds = new TreeSet();
                TreeSet otherExams = new TreeSet();
                for (Iterator j=sameDateExams.iterator();j.hasNext();) {
                    org.unitime.timetable.model.Exam other = (org.unitime.timetable.model.Exam)j.next();
                    examIds.add(other.getUniqueId());
                    otherExams.add(getAssignment(other, table));
                }
                MoreThanTwoADayConflict m2d = im2ds.get(examIds.toString());
                if (m2d==null) {
                    m2d = new MoreThanTwoADayConflict(otherExams);
                    im2ds.put(examIds.toString(), m2d);
                } else m2d.incNrStudents();
                iNrInstructorMoreThanTwoADayConflicts++;
                m2d.getStudents().add(instructor.getUniqueId());
            }
        }
        iInstructorDirects.addAll(idirects.values());
        iInstructorBackToBacks.addAll(ibackToBacks.values());
        iInstructorMoreThanTwoADays.addAll(im2ds.values());
        // Record each distribution preference of this exam that the placement violates.
        for (Iterator i=getExam().getDistributionObjects().iterator();i.hasNext();) {
            DistributionObject dObj = (DistributionObject)i.next();
            DistributionPref pref = dObj.getDistributionPref();
            if (!check(pref, getExam(), getPeriod(), getRooms(), table)) iDistributions.add(new DistributionConflict(pref, getExam()));
        }
    }

    /** Student direct conflicts of this assignment. */
    public TreeSet<DirectConflict> getDirectConflicts() { return iDirects; }
    /** Student back-to-back conflicts of this assignment. */
    public TreeSet<BackToBackConflict> getBackToBackConflicts() { return iBackToBacks; }
    /** Student more-than-two-exams-a-day conflicts of this assignment. */
    public TreeSet<MoreThanTwoADayConflict> getMoreThanTwoADaysConflicts() { return iMoreThanTwoADays; }

    /** Total number of students with a direct conflict. */
    public int getNrDirectConflicts() {
        int ret = 0;
        for (Iterator i=iDirects.iterator();i.hasNext();) {
            DirectConflict dc = (DirectConflict)i.next();
            ret += dc.getNrStudents();
        }
        return ret;
    }

    /** Total number of students with a back-to-back conflict. */
    public int getNrBackToBackConflicts() {
        int ret = 0;
        for (Iterator i=iBackToBacks.iterator();i.hasNext();) {
            BackToBackConflict btb = (BackToBackConflict)i.next();
            ret += btb.getNrStudents();
        }
        return ret;
    }

    /** Total number of students with a distance back-to-back conflict. */
    public int getNrDistanceBackToBackConflicts() {
        int ret = 0;
        for (Iterator i=iBackToBacks.iterator();i.hasNext();) {
            BackToBackConflict btb = (BackToBackConflict)i.next();
            if (btb.isDistance()) ret += btb.getNrStudents();
        }
        return ret;
    }

    /** Total number of students with more than two exams a day. */
    public int getNrMoreThanTwoConflicts() {
        int ret = 0;
        for (Iterator i=iMoreThanTwoADays.iterator();i.hasNext();) {
MoreThanTwoADayConflict m2d = (MoreThanTwoADayConflict)i.next();
            ret += m2d.getNrStudents();
        }
        return ret;
    }

    /** Number of direct conflicts involving a student of the given section. */
    public int getNrDirectConflicts(ExamSectionInfo section) {
        int ret = 0;
        for (Iterator i=iDirects.iterator();i.hasNext();) {
            DirectConflict dc = (DirectConflict)i.next();
            for (Enumeration f=dc.getStudents().elements();f.hasMoreElements();)
                if (section.getStudentIds().contains(f.nextElement())) ret++;
        }
        return ret;
    }

    /** Number of back-to-back conflicts involving a student of the given section. */
    public int getNrBackToBackConflicts(ExamSectionInfo section) {
        int ret = 0;
        for (Iterator i=iBackToBacks.iterator();i.hasNext();) {
            BackToBackConflict btb = (BackToBackConflict)i.next();
            for (Enumeration f=btb.getStudents().elements();f.hasMoreElements();)
                if (section.getStudentIds().contains(f.nextElement())) ret++;
        }
        return ret;
    }

    /** Number of distance back-to-back conflicts involving a student of the given section. */
    public int getNrDistanceBackToBackConflicts(ExamSectionInfo section) {
        int ret = 0;
        for (Iterator i=iBackToBacks.iterator();i.hasNext();) {
            BackToBackConflict btb = (BackToBackConflict)i.next();
            if (btb.isDistance())
                for (Enumeration f=btb.getStudents().elements();f.hasMoreElements();)
                    if (section.getStudentIds().contains(f.nextElement())) ret++;
        }
        return ret;
    }

    /** Number of &gt;2-a-day conflicts involving a student of the given section. */
    public int getNrMoreThanTwoConflicts(ExamSectionInfo section) {
        int ret = 0;
        for (Iterator i=iMoreThanTwoADays.iterator();i.hasNext();) {
            MoreThanTwoADayConflict m2d = (MoreThanTwoADayConflict)i.next();
            for (Enumeration f=m2d.getStudents().elements();f.hasMoreElements();)
                if (section.getStudentIds().contains(f.nextElement())) ret++;
        }
        return ret;
    }

    /** Violated distribution preferences involving this exam. */
    public TreeSet<DistributionConflict> getDistributionConflicts() { return iDistributions; }

    /** HTML list of violated distribution preferences, separated by the given delimiter. */
    public String getDistributionConflictsHtml(String delim) {
        String ret = "";
        for (Iterator i=iDistributions.iterator();i.hasNext();) {
            DistributionConflict dc = (DistributionConflict)i.next();
            if (ret.length()>0) ret+=delim;
            ret+=dc.getTypeHtml();
        }
        return ret;
    }

    /** Plain-text list of violated distribution preferences, separated by the given delimiter. */
    public String getDistributionConflictsList(String delim) {
        String ret = "";
        for (Iterator i=iDistributions.iterator();i.hasNext();) {
            DistributionConflict dc = (DistributionConflict)i.next();
            if (ret.length()>0) ret+=delim;
            ret+=PreferenceLevel.prolog2abbv(dc.getPreference())+" "+dc.getType();
        }
        return ret;
    }

    /** Number of violated distribution preferences. */
    public int getNrDistributionConflicts() { return iDistributions.size(); }

    /** True when there is at least one student conflict of any kind. */
    public boolean getHasConflicts() { return !getDirectConflicts().isEmpty() || !getBackToBackConflicts().isEmpty() || !getMoreThanTwoADaysConflicts().isEmpty(); }

    /** HTML table of all student conflicts, including the header row. */
    public String getConflictTable() { return getConflictTable(true); }

    /** HTML table of all student conflicts (directs, then &gt;2 a day, then back-to-backs). */
    public String getConflictTable(boolean header) {
        String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>";
        if (header) {
            ret += "<tr>";
            ret += "<td><i>Students</i></td>";
            ret += "<td><i>Conflict</i></td>";
            ret += "<td><i>Exam</i></td>";
            ret += "<td><i>Period</i></td>";
            ret += "<td><i>Room</i></td>";
            ret += "</tr>";
        }
        for (Iterator i=getDirectConflicts().iterator();i.hasNext();) ret += i.next().toString();
        for (Iterator i=getMoreThanTwoADaysConflicts().iterator();i.hasNext();) ret += i.next().toString();
        for (Iterator i=getBackToBackConflicts().iterator();i.hasNext();) ret += i.next().toString();
        ret += "</table>";
        return ret;
    }

    /** HTML table of all student conflicts with rows that link to the other exams. */
    public String getConflictInfoTable() {
        String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>";
        ret += "<tr>";
        ret += "<td><i>Students</i></td>";
        ret += "<td><i>Conflict</i></td>";
        ret += "<td><i>Exam</i></td>";
        ret += "<td><i>Period</i></td>";
        ret += "<td><i>Room</i></td>";
        ret += "</tr>";
        for (DirectConflict dc : getDirectConflicts()) ret += dc.toString(true);
        for (MoreThanTwoADayConflict m2d : getMoreThanTwoADaysConflicts()) ret += m2d.toString(true);
        for (BackToBackConflict btb : getBackToBackConflicts()) ret += btb.toString(true);
        ret += "</table>";
        return ret;
    }

    /** HTML table of violated distribution preferences, including the header row. */
    public String getDistributionConflictTable() { return getDistributionConflictTable(true); }

    /** HTML table of violated distribution preferences. */
    public String getDistributionConflictTable(boolean header) {
        String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>";
        if (header) {
            ret += "<tr>";
            ret +=
"<td><i>Preference</i></td>"; ret += "<td><i>Distribution</i></td>"; ret += "<td><i>Exam</i></td>"; ret += "<td><i>Period</i></td>"; ret += "<td><i>Room</i></td>"; ret += "</tr>"; } for (Iterator i=getDistributionConflicts().iterator();i.hasNext();) ret += i.next().toString(); ret += "</table>"; return ret; } public String getDistributionInfoConflictTable() { String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>"; ret += "<tr>"; ret += "<td><i>Preference</i></td>"; ret += "<td><i>Distribution</i></td>"; ret += "<td><i>Exam</i></td>"; ret += "<td><i>Period</i></td>"; ret += "<td><i>Room</i></td>"; ret += "</tr>"; for (DistributionConflict dc : getDistributionConflicts()) ret += dc.toString(true); ret += "</table>"; return ret; } public TreeSet<DirectConflict> getInstructorDirectConflicts() { return iInstructorDirects; } public TreeSet<BackToBackConflict> getInstructorBackToBackConflicts() { return iInstructorBackToBacks; } public TreeSet<MoreThanTwoADayConflict> getInstructorMoreThanTwoADaysConflicts() { return iInstructorMoreThanTwoADays; } public int getNrInstructorDirectConflicts() { int ret = 0; for (Iterator i=iInstructorDirects.iterator();i.hasNext();) { DirectConflict dc = (DirectConflict)i.next(); ret += dc.getNrStudents(); } return ret; } public int getNrInstructorBackToBackConflicts() { int ret = 0; for (Iterator i=iInstructorBackToBacks.iterator();i.hasNext();) { BackToBackConflict btb = (BackToBackConflict)i.next(); ret += btb.getNrStudents(); } return ret; } public int getNrInstructorDistanceBackToBackConflicts() { int ret = 0; for (Iterator i=iInstructorBackToBacks.iterator();i.hasNext();) { BackToBackConflict btb = (BackToBackConflict)i.next(); if (btb.isDistance()) ret += btb.getNrStudents(); } return ret; } public int getNrInstructorMoreThanTwoConflicts() { int ret = 0; for (Iterator i=iInstructorMoreThanTwoADays.iterator();i.hasNext();) { MoreThanTwoADayConflict m2d = (MoreThanTwoADayConflict)i.next(); ret += 
m2d.getNrStudents(); } return ret; } public int getNrInstructorDirectConflicts(ExamSectionInfo section) { int ret = 0; for (Iterator i=iInstructorDirects.iterator();i.hasNext();) { DirectConflict dc = (DirectConflict)i.next(); for (Enumeration f=dc.getStudents().elements();f.hasMoreElements();) if (dc.getOtherEventId()!=null) { if (section.getStudentIds().contains(f.nextElement())) ret++; } else ret++; } return ret; } public int getNrInstructorBackToBackConflicts(ExamSectionInfo section) { return getNrInstructorBackToBackConflicts(); } public int getNrInstructorDistanceBackToBackConflicts(ExamSectionInfo section) { return getNrInstructorDistanceBackToBackConflicts(); } public int getNrInstructorMoreThanTwoConflicts(ExamSectionInfo section) { return getNrInstructorMoreThanTwoConflicts(); } public boolean getHasInstructorConflicts() { return !getInstructorDirectConflicts().isEmpty() || !getInstructorBackToBackConflicts().isEmpty() || !getInstructorMoreThanTwoADaysConflicts().isEmpty(); } public String getInstructorConflictTable() { return getInstructorConflictTable(true); } public String getInstructorConflictTable(boolean header) { String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>"; if (header) { ret += "<tr>"; ret += "<td><i>Instructors</i></td>"; ret += "<td><i>Conflict</i></td>"; ret += "<td><i>Exam</i></td>"; ret += "<td><i>Period</i></td>"; ret += "<td><i>Room</i></td>"; ret += "</tr>"; } for (Iterator i=getInstructorDirectConflicts().iterator();i.hasNext();) ret += i.next().toString(); for (Iterator i=getInstructorMoreThanTwoADaysConflicts().iterator();i.hasNext();) ret += i.next().toString(); for (Iterator i=getInstructorBackToBackConflicts().iterator();i.hasNext();) ret += i.next().toString(); ret += "</table>"; return ret; } public String getInstructorConflictInfoTable() { String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>"; ret += "<tr>"; ret += "<td><i>Students</i></td>"; ret += 
"<td><i>Conflict</i></td>";
        ret += "<td><i>Exam</i></td>";
        ret += "<td><i>Period</i></td>";
        ret += "<td><i>Room</i></td>";
        ret += "</tr>";
        for (DirectConflict dc : getInstructorDirectConflicts()) ret += dc.toString(true);
        for (MoreThanTwoADayConflict m2d : getInstructorMoreThanTwoADaysConflicts()) ret += m2d.toString(true);
        for (BackToBackConflict btb : getInstructorBackToBackConflicts()) ret += btb.toString(true);
        ret += "</table>";
        return ret;
    }

    /**
     * A direct (same period) conflict of this exam with another exam, a class event
     * meeting, or another resource unavailability. Ordered by decreasing number of
     * affected students. The students vector holds student or instructor unique ids,
     * depending on which kind of conflict this instance records.
     */
    public static class DirectConflict implements Serializable, Comparable<DirectConflict> {
        protected ExamAssignment iOtherExam = null; // the other exam; null for event/unavailability conflicts
        protected int iNrStudents = 1;
        protected Vector<Long> iStudents = new Vector();
        protected String iOtherEventName = null;
        protected String iOtherEventTime = null;
        protected String iOtherEventDate = null;
        protected String iOtherEventRoom = null;
        protected int iOtherEventSize = 0;
        protected Long iOtherEventId;
        protected transient ClassEvent iOtherEvent = null; // lazily loaded; see getOtherEvent()
        protected DirectConflict(ExamAssignment otherExam) {
            iOtherExam = otherExam;
        }
        // From a persisted ExamConflict record; students==true reads its student list,
        // students==false its instructor list.
        protected DirectConflict(ExamAssignment otherExam, ExamConflict conflict, boolean students) {
            iOtherExam = otherExam;
            if (students) {
                iNrStudents = conflict.getStudents().size();
                for (Iterator i=conflict.getStudents().iterator();i.hasNext();) {
                    Student student = (Student)i.next();
                    iStudents.add(student.getUniqueId());
                }
            } else {
                iNrStudents = conflict.getInstructors().size();
                for (Iterator i=conflict.getInstructors().iterator();i.hasNext();) {
                    DepartmentalInstructor instructor = (DepartmentalInstructor)i.next();
                    iStudents.add(instructor.getUniqueId());
                }
            }
        }
        // Conflict with a class event meeting; the event reference is kept only when
        // the event actually is a ClassEvent.
        protected DirectConflict(Meeting otherMeeting) {
            try {
                iOtherEvent = (ClassEvent)otherMeeting.getEvent();
            } catch (ClassCastException e) {}
            iOtherEventSize = otherMeeting.getEvent().getMaxCapacity();
            iOtherEventId = otherMeeting.getEvent().getUniqueId();
            iOtherEventName = otherMeeting.getEvent().getEventName();
            iOtherEventDate = otherMeeting.dateStr();
            iOtherEventTime = otherMeeting.startTime()+" - "+otherMeeting.stopTime();
            iOtherEventRoom = otherMeeting.getRoomLabel();
        }
        // Append another meeting's room to the displayed room label.
        protected void addMeeting(Meeting otherMeeting) {
            if (otherMeeting.getLocation()!=null) iOtherEventRoom += (iOtherEventRoom!=null && iOtherEventRoom.length()>0?", ":"")+otherMeeting.getRoomLabel();
        }
        protected DirectConflict(Meeting otherMeeting,Collection<Long> studentIds) {
            this(otherMeeting);
            iNrStudents = studentIds.size();
            iStudents.addAll(studentIds);
        }
        // Conflict with a generic resource unavailability reported by the solver.
        protected DirectConflict(ExamResourceUnavailability unavailability, Vector<Long> studentIds) {
            iOtherEventId = unavailability.getId();
            iOtherEventSize = unavailability.getSize();
            iOtherEventName = unavailability.getName();
            iOtherEventTime = unavailability.getTime();
            iOtherEventDate = unavailability.getDate();
            iOtherEventRoom = unavailability.getRoom();
            iNrStudents = studentIds.size();
            iStudents = studentIds;
        }
        protected void incNrStudents() { iNrStudents++; }
        public int getNrStudents() { return iNrStudents; }
        public Vector<Long> getStudents() { return iStudents; }
        public ExamAssignment getOtherExam() { return iOtherExam; }
        public Long getOtherEventId() { return iOtherEventId; }
        // Lazily load the conflicting class event by id (null when this is an exam conflict).
        public ClassEvent getOtherEvent() {
            if (iOtherEvent!=null) return iOtherEvent;
            if (iOtherEventId==null) return null;
            iOtherEvent = new ClassEventDAO().get(iOtherEventId);
            return iOtherEvent;
        }
        public String getOtherEventName() { return iOtherEventName; }
        public String getOtherEventRoom() { return iOtherEventRoom; }
        public String getOtherEventDate() { return iOtherEventDate; }
        public String getOtherEventTime() { return iOtherEventTime; }
        public int getOtherEventSize() { return iOtherEventSize; }
        public Class_ getOtherClass() { return (getOtherEvent()==null?null:getOtherEvent().getClazz()); }
        // Larger conflicts first; exam-less (event) conflicts sort before exam conflicts.
        public int compareTo(DirectConflict c) {
            int cmp = -Double.compare(getNrStudents(), c.getNrStudents());
            if (cmp!=0) return cmp;
            if (getOtherExam()==null) return (c.getOtherExam()==null?0:-1);
            if (c.getOtherExam()==null) return 1;
            return getOtherExam().compareTo(c.getOtherExam());
        }
        public String toString() { return toString(false); }
        // One HTML table row; links==true makes the row click through to the other exam.
        public String toString(boolean links) {
            String ret = "";
            if (links && getOtherExam()!=null)
                ret += "<tr onmouseover=\"this.style.backgroundColor='rgb(223,231,242)';this.style.cursor='hand';this.style.cursor='pointer';\" onmouseout=\"this.style.backgroundColor='transparent';\" onclick=\"document.location='examInfo.do?examId="+getOtherExam().getExamId()+"&op=Select';\">";
            else
                ret += "<tr onmouseover=\"this.style.backgroundColor='rgb(223,231,242)';\" onmouseout=\"this.style.backgroundColor='transparent';\">";
            ret += "<td style='font-weight:bold;color:"+PreferenceLevel.prolog2color("P")+";'>";
            ret += String.valueOf(getNrStudents());
            ret += "</td>";
            ret += "<td style='font-weight:bold;color:"+PreferenceLevel.prolog2color("P")+";'>";
            ret += "Direct";
            ret += "</td>";
            if (getOtherExam()==null) {
                if (iOtherEventName!=null) {
                    ret += "<td>"+iOtherEventName+"</td>";
                    ret += "<td>"+iOtherEventDate+" "+iOtherEventTime+"</td>";
                    ret += "<td>"+iOtherEventRoom+"</td>";
                } else {
                    ret += "<td colspan='3'>Student/instructor not available for unknown reason.</td>";
                }
            } else {
                ret += "<td>"+getOtherExam().getExamNameHtml()+"</td>";
                ret += "<td>"+getOtherExam().getPeriodAbbreviationWithPref()+"</td>";
                ret += "<td>"+getOtherExam().getRoomsNameWithPref(", ")+"</td>";
            }
            ret += "</tr>";
            return ret;
        }
    }

    /**
     * A back-to-back conflict: another exam of the same student/instructor in an
     * adjacent period; flagged as "distance" when the rooms are further apart than
     * the solver's back-to-back distance limit.
     */
    public static class BackToBackConflict implements Serializable, Comparable<BackToBackConflict> {
        protected ExamAssignment iOtherExam;
        protected int iNrStudents = 1;
        protected boolean iIsDistance = false;
        protected Vector<Long> iStudents = new Vector();
        protected double iDistance = 0;
        protected BackToBackConflict(ExamAssignment otherExam, boolean isDistance, double distance) {
            iOtherExam = otherExam;
            iIsDistance = isDistance;
            iDistance = distance;
        }
        // From a persisted ExamConflict record; students==true reads its student list,
        // students==false its instructor list.
        protected BackToBackConflict(ExamAssignment otherExam, ExamConflict conflict, boolean students) {
            iOtherExam = otherExam;
            if (students) {
                iNrStudents =
conflict.getStudents().size();
                for (Iterator i=conflict.getStudents().iterator();i.hasNext();) {
                    Student student = (Student)i.next();
                    iStudents.add(student.getUniqueId());
                }
            } else {
                iNrStudents = conflict.getInstructors().size();
                for (Iterator i=conflict.getInstructors().iterator();i.hasNext();) {
                    DepartmentalInstructor instructor = (DepartmentalInstructor)i.next();
                    iStudents.add(instructor.getUniqueId());
                }
            }
            iIsDistance = conflict.isDistanceBackToBackConflict();
            iDistance = conflict.getDistance();
        }
        protected void incNrStudents() { iNrStudents++; }
        public int getNrStudents() { return iNrStudents; }
        public boolean isDistance() { return iIsDistance; }
        public ExamAssignment getOtherExam() { return iOtherExam; }
        public Vector<Long> getStudents() { return iStudents; }
        public double getDistance() { return iDistance; }
        // Larger conflicts first, distance conflicts before plain back-to-backs.
        public int compareTo(BackToBackConflict c) {
            int cmp = -Double.compare(getNrStudents(), c.getNrStudents());
            if (cmp!=0) return cmp;
            if (isDistance()!=c.isDistance()) return (isDistance()?-1:1);
            return getOtherExam().compareTo(c.getOtherExam());
        }
        public String toString() { return toString(false); }
        // One HTML table row; links==true makes the row click through to the other exam.
        public String toString(boolean links) {
            String ret = "";
            if (links && getOtherExam()!=null)
                ret += "<tr onmouseover=\"this.style.backgroundColor='rgb(223,231,242)';this.style.cursor='hand';this.style.cursor='pointer';\" onmouseout=\"this.style.backgroundColor='transparent';\" onclick=\"document.location='examInfo.do?examId="+getOtherExam().getExamId()+"&op=Select';\">";
            else
                ret += "<tr onmouseover=\"this.style.backgroundColor='rgb(223,231,242)';\" onmouseout=\"this.style.backgroundColor='transparent';\">";
            ret += "<td style='font-weight:bold;color:"+PreferenceLevel.prolog2color("1")+";'>";
            ret += String.valueOf(getNrStudents());
            ret += "</td>";
            ret += "<td style='font-weight:bold;color:"+PreferenceLevel.prolog2color("1")+";'>";
            ret += "Back-To-Back";
            if (isDistance()) ret+="<br>("+Math.round(10.0*getDistance())+" m)";
            ret += "</td>";
            ret += "<td>"+getOtherExam().getExamNameHtml()+"</td>";
            ret += "<td>"+getOtherExam().getPeriodAbbreviationWithPref()+"</td>";
            ret += "<td>"+getOtherExam().getRoomsNameWithPref(", ")+"</td>";
            ret += "</tr>";
            return ret;
        }
    }

    /**
     * A more-than-two-exams-a-day conflict: the set of the student's/instructor's
     * other exams on the same day (at least two of them, plus this exam).
     */
    public static class MoreThanTwoADayConflict implements Serializable, Comparable<MoreThanTwoADayConflict> {
        protected TreeSet<ExamAssignment> iOtherExams;
        protected int iNrStudents = 1;
        protected Vector<Long> iStudents = new Vector();
        protected MoreThanTwoADayConflict(TreeSet<ExamAssignment> otherExams) {
            iOtherExams = otherExams;
        }
        // From a persisted ExamConflict record; students==true reads its student list,
        // students==false its instructor list.
        protected MoreThanTwoADayConflict(TreeSet<ExamAssignment> otherExams, ExamConflict conflict, boolean students) {
            iOtherExams = otherExams;
            if (students) {
                iNrStudents = conflict.getStudents().size();
                for (Iterator i=conflict.getStudents().iterator();i.hasNext();) {
                    Student student = (Student)i.next();
                    iStudents.add(student.getUniqueId());
                }
            } else {
                iNrStudents = conflict.getInstructors().size();
                for (Iterator i=conflict.getInstructors().iterator();i.hasNext();) {
                    DepartmentalInstructor instructor = (DepartmentalInstructor)i.next();
                    iStudents.add(instructor.getUniqueId());
                }
            }
        }
        protected void incNrStudents() { iNrStudents++; }
        public int getNrStudents() { return iNrStudents; }
        public Vector<Long> getStudents() { return iStudents; }
        public TreeSet<ExamAssignment> getOtherExams() { return iOtherExams; }
        // Larger conflicts first, then larger exam sets first, then by the exams themselves.
        public int compareTo(MoreThanTwoADayConflict c) {
            int cmp = -Double.compare(getNrStudents(), c.getNrStudents());
            if (cmp!=0) return cmp;
            cmp = -Double.compare(getOtherExams().size(), c.getOtherExams().size());
            if (cmp!=0) return cmp;
            Iterator i1 = getOtherExams().iterator(), i2 = c.getOtherExams().iterator();
            while (i1.hasNext()) {
                ExamAssignment a1 = (ExamAssignment)i1.next();
                ExamAssignment a2 = (ExamAssignment)i2.next();
                if (!a1.equals(a2)) return a1.compareTo(a2);
            }
            return 0;
        }
        public String toString() { return toString(false); }
        // Multi-row HTML fragment, one row per other exam; links==true adds click-through.
        public String toString(boolean links) {
            String ret = "";
            String mouseOver = "";
            String mouseOut
= "";
            String id = "";
            // id is the colon-separated list of the other exams' ids; each generated
            // row gets element id "<id>:<row index>" so the shared mouseover/mouseout
            // handlers can highlight every row of this conflict at once.
            for (Iterator i=getOtherExams().iterator();i.hasNext();) {
                ExamAssignment a = (ExamAssignment)i.next();
                id+=a.getExamId();
                if (i.hasNext()) id+=":";
            }
            int idx = 0;
            Vector<Long> ids = new Vector();
            for (Iterator i=getOtherExams().iterator();i.hasNext();idx++) {
                ExamAssignment a = (ExamAssignment)i.next();
                ids.add(a.getExamId());
                mouseOver += "document.getElementById('"+id+":"+idx+"').style.backgroundColor='rgb(223,231,242)';";
                if (links) mouseOver += "this.style.cursor='hand';this.style.cursor='pointer';";
                mouseOut += "document.getElementById('"+id+":"+idx+"').style.backgroundColor='transparent';";
            }
            idx = 0;
            if (links)
                ret += "<tr id='"+id+":"+idx+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\" onclick=\"document.location='examInfo.do?examId="+ids.elementAt(idx)+"&op=Select';\">";
            else
                ret += "<tr id='"+id+":"+idx+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\">";
            ret += "<td valign='top' rowspan='"+getOtherExams().size()+"' style='font-weight:bold;color:"+PreferenceLevel.prolog2color("2")+";'>";
            ret += String.valueOf(getNrStudents());
            ret += "</td>";
            ret += "<td valign='top' rowspan='"+getOtherExams().size()+"' style='font-weight:bold;color:"+PreferenceLevel.prolog2color("2")+";'>";
            ret += "&gt;2 A Day";
            ret += "</td>";
            for (Iterator i=getOtherExams().iterator();i.hasNext();idx++) {
                ExamAssignment a = (ExamAssignment)i.next();
                ret += "<td>"+a.getExamNameHtml()+"</td>";
                ret += "<td>"+a.getPeriodAbbreviationWithPref()+"</td>";
                ret += "<td>"+a.getRoomsNameWithPref(", ")+"</td>";
                ret += "</tr>";
                if (i.hasNext()) {
                    if (links)
                        ret += "<tr id='"+id+":"+(1+idx)+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\" onclick=\"document.location='examInfo.do?examId="+ids.elementAt(1+idx)+"&op=Select';\">";
                    else
                        ret += "<tr id='"+id+":"+(1+idx)+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\">";
                }
            }
            return ret;
        }
    }

    /**
     * A violated distribution preference involving this exam; lists the other exams
     * of the constraint together with the preference level.
     */
    public static class DistributionConflict implements Serializable, Comparable<DistributionConflict> {
        protected TreeSet<ExamInfo> iOtherExams;
        protected String iPreference; // PreferenceLevel prolog code, e.g. "R", "-2", "-1"
        protected Long iId;
        protected String iType;
        protected transient DistributionPref iPref = null;
        protected DistributionConflict(Long id, String type, TreeSet<ExamInfo> otherExams, String preference) {
            iId = id;
            iType = type;
            iOtherExams = otherExams;
            iPreference = preference;
        }
        // From a solver-side distribution constraint; exclude is this exam itself.
        protected DistributionConflict(ExamDistributionConstraint dc, Exam exclude) {
            iId = dc.getId();
            iType = dc.getTypeString();
            iOtherExams = new TreeSet();
            for (Enumeration e=dc.variables().elements();e.hasMoreElements();) {
                Exam exam = (Exam)e.nextElement();
                if (exam.equals(exclude)) continue;
                iOtherExams.add(exam.getAssignment()==null?new ExamInfo(exam):new ExamAssignment(exam,(ExamPlacement)exam.getAssignment()));
            }
            iPreference = (dc.isHard()?"R":dc.getWeight()>=2?"-2":"-1");
        }
        // From a persisted distribution preference; exclude is this exam itself.
        protected DistributionConflict(DistributionPref pref, org.unitime.timetable.model.Exam exclude) {
            iPref = pref;
            iId = pref.getUniqueId();
            iType = pref.getDistributionType().getLabel();
            iOtherExams = new TreeSet();
            for (Iterator i=pref.getDistributionObjects().iterator();i.hasNext();) {
                DistributionObject dObj = (DistributionObject)i.next();
                org.unitime.timetable.model.Exam exam = (org.unitime.timetable.model.Exam)dObj.getPrefGroup();
                if (exam.equals(exclude)) continue;
                iOtherExams.add(exam.getAssignedPeriod()==null?new ExamInfo(exam):new ExamAssignment(exam));
            }
            iPreference = pref.getPrefLevel().getPrefProlog();
        }
        public Long getId() { return iId; }
        public String getType() { return iType; }
        // Colored type label with a tooltip naming the other exams of the constraint.
        public String getTypeHtml() {
            String title = PreferenceLevel.prolog2string(getPreference())+" "+getType()+" with ";
            for (Iterator i=getOtherExams().iterator();i.hasNext();) {
                ExamInfo a = (ExamInfo)i.next();
                title += a.getExamName();
                if (i.hasNext()) title += " and ";
            }
            return "<span style='font-weight:bold;color:"+PreferenceLevel.prolog2color(getPreference())+";' title='"+title+"'>"+iType+"</span>";
        }
        public String
getPreference() { return iPreference; }
        public TreeSet<ExamInfo> getOtherExams() { return iOtherExams; }
        public int hashCode() { return getId().hashCode(); }
        // Identity is the constraint/preference id.
        public boolean equals(Object o) {
            if (o==null || !(o instanceof DistributionConflict)) return false;
            DistributionConflict c = (DistributionConflict)o;
            return getId().equals(c.getId());
        }
        // Ordered by the involved exams, falling back to the id.
        public int compareTo(DistributionConflict c) {
            Iterator i1 = getOtherExams().iterator(), i2 = c.getOtherExams().iterator();
            while (i1.hasNext()) {
                ExamInfo a1 = (ExamInfo)i1.next();
                ExamInfo a2 = (ExamInfo)i2.next();
                if (!a1.equals(a2)) return a1.compareTo(a2);
            }
            return getId().compareTo(c.getId());
        }
        public String toString() { return toString(false); }
        // Multi-row HTML fragment, one row per other exam; links==true adds click-through.
        // Row ids and the shared mouseover/mouseout handlers mirror MoreThanTwoADayConflict.toString.
        public String toString(boolean links) {
            String ret = "";
            String mouseOver = "";
            String mouseOut = "";
            String id = "";
            for (Iterator i=getOtherExams().iterator();i.hasNext();) {
                ExamInfo a = (ExamInfo)i.next();
                id+=a.getExamId();
                if (i.hasNext()) id+=":";
            }
            int idx = 0;
            Vector<Long> ids = new Vector();
            for (Iterator i=getOtherExams().iterator();i.hasNext();idx++) {
                ExamInfo a = (ExamInfo)i.next();
                ids.add(a.getExamId());
                mouseOver += "document.getElementById('"+id+":"+idx+"').style.backgroundColor='rgb(223,231,242)';";
                if (links) mouseOver += "this.style.cursor='hand';this.style.cursor='pointer';";
                mouseOut += "document.getElementById('"+id+":"+idx+"').style.backgroundColor='transparent';";
            }
            idx = 0;
            if (links)
                ret += "<tr id='"+id+":"+idx+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\" onclick=\"document.location='examInfo.do?examId="+ids.elementAt(idx)+"&op=Select';\">";
            else
                ret += "<tr id='"+id+":"+idx+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\">";
            ret += "<td valign='top' rowspan='"+getOtherExams().size()+"' style='font-weight:bold;color:"+PreferenceLevel.prolog2color(getPreference())+";'>";
            ret += PreferenceLevel.prolog2string(getPreference());
            ret += "</td>";
            ret += "<td valign='top' rowspan='"+getOtherExams().size()+"' style='font-weight:bold;color:"+PreferenceLevel.prolog2color(getPreference())+";'>";
            ret += getType();
            ret += "</td>";
            for (Iterator i=getOtherExams().iterator();i.hasNext();idx++) {
                ExamInfo a = (ExamInfo)i.next();
                ret += "<td>"+a.getExamNameHtml()+"</td>";
                // Period/room cells are left empty for exams without an assignment.
                if (a instanceof ExamAssignment) {
                    ExamAssignment ea = (ExamAssignment)a;
                    ret += "<td>"+ea.getPeriodAbbreviationWithPref()+"</td>";
                    ret += "<td>"+ea.getRoomsNameWithPref(", ")+"</td>";
                } else {
                    ret += "<td></td>";
                    ret += "<td></td>";
                }
                ret += "</tr>";
                if (i.hasNext()) {
                    if (links)
                        ret += "<tr id='"+id+":"+(1+idx)+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\" onclick=\"document.location='examInfo.do?examId="+ids.elementAt(1+idx)+"&op=Select';\">";
                    else
                        ret += "<tr id='"+id+":"+(1+idx)+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\">";
                }
            }
            return ret;
        }
    }
}
JavaSource/org/unitime/timetable/solver/exam/ui/ExamAssignmentInfo.java
/* * UniTime 3.1 (University Timetabling Application) * Copyright (C) 2008, UniTime.org, and individual contributors * as indicated by the @authors tag. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ package org.unitime.timetable.solver.exam.ui; import java.io.Serializable; import java.util.Collection; import java.util.Comparator; import java.util.Enumeration; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.Vector; import java.util.Map.Entry; import org.unitime.timetable.model.ClassEvent; import org.unitime.timetable.model.ClassInstructor; import org.unitime.timetable.model.Class_; import org.unitime.timetable.model.DepartmentalInstructor; import org.unitime.timetable.model.DistributionObject; import org.unitime.timetable.model.DistributionPref; import org.unitime.timetable.model.ExamConflict; import org.unitime.timetable.model.ExamPeriod; import org.unitime.timetable.model.Location; import org.unitime.timetable.model.Meeting; import org.unitime.timetable.model.PreferenceLevel; import org.unitime.timetable.model.SolverParameterDef; import org.unitime.timetable.model.Student; import org.unitime.timetable.model.dao.ClassEventDAO; import org.unitime.timetable.solver.exam.ExamModel; import 
org.unitime.timetable.solver.exam.ExamResourceUnavailability; import net.sf.cpsolver.exam.model.Exam; import net.sf.cpsolver.exam.model.ExamDistributionConstraint; import net.sf.cpsolver.exam.model.ExamInstructor; import net.sf.cpsolver.exam.model.ExamPlacement; import net.sf.cpsolver.exam.model.ExamStudent; /** * @author Tomas Muller */ public class ExamAssignmentInfo extends ExamAssignment implements Serializable { private TreeSet<DirectConflict> iDirects = new TreeSet(); private TreeSet<BackToBackConflict> iBackToBacks = new TreeSet(); private TreeSet<MoreThanTwoADayConflict> iMoreThanTwoADays = new TreeSet(); private TreeSet<DirectConflict> iInstructorDirects = new TreeSet(); private TreeSet<BackToBackConflict> iInstructorBackToBacks = new TreeSet(); private TreeSet<MoreThanTwoADayConflict> iInstructorMoreThanTwoADays = new TreeSet(); private TreeSet<DistributionConflict> iDistributions = new TreeSet(); public ExamAssignmentInfo(ExamPlacement placement) { this((Exam)placement.variable(),placement); } public ExamAssignmentInfo(Exam exam, ExamPlacement placement) { super(exam, placement); if (placement!=null) { ExamModel model = (ExamModel)exam.getModel(); Hashtable<Exam,DirectConflict> directs = new Hashtable(); for (Enumeration e=exam.getStudents().elements();e.hasMoreElements();) { ExamStudent student = (ExamStudent)e.nextElement(); for (Iterator i=student.getExams(placement.getPeriod()).iterator();i.hasNext();) { Exam other = (Exam)i.next(); if (other.equals(exam)) continue; DirectConflict dc = directs.get(other); if (dc==null) { dc = new DirectConflict(new ExamAssignment((ExamPlacement)other.getAssignment())); directs.put(other, dc); } else dc.incNrStudents(); dc.getStudents().add(student.getId()); } } iDirects.addAll(directs.values()); int btbDist = model.getBackToBackDistance(); Hashtable<Exam,BackToBackConflict> backToBacks = new Hashtable(); for (Enumeration e=exam.getStudents().elements();e.hasMoreElements();) { ExamStudent student = 
(ExamStudent)e.nextElement(); if (placement.getPeriod().prev()!=null) { if (model.isDayBreakBackToBack() || placement.getPeriod().prev().getDay()==placement.getPeriod().getDay()) { Set exams = student.getExams(placement.getPeriod().prev()); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); double distance = placement.getDistance((ExamPlacement)other.getAssignment()); BackToBackConflict btb = backToBacks.get(other); if (btb==null) { btb = new BackToBackConflict(new ExamAssignment((ExamPlacement)other.getAssignment()), (btbDist<0?false:distance>btbDist), distance); backToBacks.put(other, btb); } else btb.incNrStudents(); btb.getStudents().add(student.getId()); } } } if (placement.getPeriod().next()!=null) { if (model.isDayBreakBackToBack() || placement.getPeriod().next().getDay()==placement.getPeriod().getDay()) { Set exams = student.getExams(placement.getPeriod().next()); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); BackToBackConflict btb = backToBacks.get(other); double distance = placement.getDistance((ExamPlacement)other.getAssignment()); if (btb==null) { btb = new BackToBackConflict(new ExamAssignment((ExamPlacement)other.getAssignment()), (btbDist<0?false:distance>btbDist), distance); backToBacks.put(other, btb); } else btb.incNrStudents(); btb.getStudents().add(student.getId()); } } } } iBackToBacks.addAll(backToBacks.values()); Hashtable<String,MoreThanTwoADayConflict> m2ds = new Hashtable(); for (Enumeration e=exam.getStudents().elements();e.hasMoreElements();) { ExamStudent student = (ExamStudent)e.nextElement(); Set exams = student.getExamsADay(placement.getPeriod()); int nrExams = exams.size() + (exams.contains(exam)?0:1); if (nrExams<=2) continue; TreeSet examIds = new TreeSet(); TreeSet otherExams = new TreeSet(); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); if (other.equals(exam)) continue; examIds.add(other.getId()); otherExams.add(new 
ExamAssignment((ExamPlacement)other.getAssignment())); } MoreThanTwoADayConflict m2d = m2ds.get(examIds.toString()); if (m2d==null) { m2d = new MoreThanTwoADayConflict(otherExams); m2ds.put(examIds.toString(), m2d); } else m2d.incNrStudents(); m2d.getStudents().add(student.getId()); } iMoreThanTwoADays.addAll(m2ds.values()); Hashtable<Exam,DirectConflict> idirects = new Hashtable(); for (Enumeration e=exam.getInstructors().elements();e.hasMoreElements();) { ExamInstructor instructor = (ExamInstructor)e.nextElement(); for (Iterator i=instructor.getExams(placement.getPeriod()).iterator();i.hasNext();) { Exam other = (Exam)i.next(); if (other.equals(exam)) continue; DirectConflict dc = idirects.get(other); if (dc==null) { dc = new DirectConflict(new ExamAssignment((ExamPlacement)other.getAssignment())); idirects.put(other, dc); } else dc.incNrStudents(); dc.getStudents().add(instructor.getId()); } } iInstructorDirects.addAll(idirects.values()); Hashtable<Exam,BackToBackConflict> ibackToBacks = new Hashtable(); for (Enumeration e=exam.getInstructors().elements();e.hasMoreElements();) { ExamInstructor instructor = (ExamInstructor)e.nextElement(); if (placement.getPeriod().prev()!=null) { if (model.isDayBreakBackToBack() || placement.getPeriod().prev().getDay()==placement.getPeriod().getDay()) { Set exams = instructor.getExams(placement.getPeriod().prev()); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); double distance = placement.getDistance((ExamPlacement)other.getAssignment()); BackToBackConflict btb = ibackToBacks.get(other); if (btb==null) { btb = new BackToBackConflict(new ExamAssignment((ExamPlacement)other.getAssignment()), (btbDist<0?false:distance>btbDist), distance); ibackToBacks.put(other, btb); } else btb.incNrStudents(); btb.getStudents().add(instructor.getId()); } } } if (placement.getPeriod().next()!=null) { if (model.isDayBreakBackToBack() || placement.getPeriod().next().getDay()==placement.getPeriod().getDay()) { Set exams 
= instructor.getExams(placement.getPeriod().next()); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); BackToBackConflict btb = ibackToBacks.get(other); double distance = placement.getDistance((ExamPlacement)other.getAssignment()); if (btb==null) { btb = new BackToBackConflict(new ExamAssignment((ExamPlacement)other.getAssignment()), (btbDist<0?false:distance>btbDist), distance); ibackToBacks.put(other, btb); } else btb.incNrStudents(); btb.getStudents().add(instructor.getId()); } } } } iInstructorBackToBacks.addAll(ibackToBacks.values()); Hashtable<String,MoreThanTwoADayConflict> im2ds = new Hashtable(); for (Enumeration e=exam.getInstructors().elements();e.hasMoreElements();) { ExamInstructor instructor = (ExamInstructor)e.nextElement(); Set exams = instructor.getExamsADay(placement.getPeriod()); int nrExams = exams.size() + (exams.contains(exam)?0:1); if (nrExams<=2) continue; TreeSet examIds = new TreeSet(); TreeSet otherExams = new TreeSet(); for (Iterator i=exams.iterator();i.hasNext();) { Exam other = (Exam)i.next(); if (other.equals(exam)) continue; examIds.add(other.getId()); otherExams.add(new ExamAssignment((ExamPlacement)other.getAssignment())); } MoreThanTwoADayConflict m2d = im2ds.get(examIds.toString()); if (m2d==null) { m2d = new MoreThanTwoADayConflict(otherExams); im2ds.put(examIds.toString(), m2d); } else m2d.incNrStudents(); m2d.getStudents().add(instructor.getId()); } iInstructorMoreThanTwoADays.addAll(im2ds.values()); computeUnavailablility(exam, model.getUnavailabilities(placement.getPeriod())); for (Enumeration e=exam.getDistributionConstraints().elements();e.hasMoreElements();) { ExamDistributionConstraint dc = (ExamDistributionConstraint)e.nextElement(); if (dc.isHard()) { if (dc.inConflict(placement)) iDistributions.add(new DistributionConflict(dc,exam)); } else { if (!dc.isSatisfied(placement)) iDistributions.add(new DistributionConflict(dc,exam)); } } } } public 
ExamAssignmentInfo(org.unitime.timetable.model.Exam exam) { super(exam); if (exam.getConflicts()!=null && !exam.getConflicts().isEmpty()) { for (Iterator i=exam.getConflicts().iterator();i.hasNext();) { ExamConflict conf = (ExamConflict)i.next(); if (conf.isDirectConflict()) { ExamAssignment other = null; for (Iterator j=conf.getExams().iterator();j.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)j.next(); if (x.equals(exam)) continue; if (x.getAssignedPeriod()!=null) other = new ExamAssignment(x); } if (conf.getNrStudents()>0) { iDirects.add(new DirectConflict(other, conf, true)); iNrDirectConflicts += conf.getNrStudents(); } if (conf.getNrInstructors()>0) { iInstructorDirects.add(new DirectConflict(other, conf, false)); iNrInstructorDirectConflicts += conf.getNrInstructors(); } } else if (conf.isBackToBackConflict()) { ExamAssignment other = null; for (Iterator j=conf.getExams().iterator();j.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)j.next(); if (x.equals(exam)) continue; if (x.getAssignedPeriod()!=null) other = new ExamAssignment(x); } if (other==null) continue; if (conf.getNrStudents()>0) { iBackToBacks.add(new BackToBackConflict(other, conf, true)); iNrBackToBackConflicts += conf.getNrStudents(); if (conf.isDistanceBackToBackConflict()) iNrDistanceBackToBackConflicts += conf.getNrStudents(); } if (conf.getNrInstructors()>0) { iInstructorBackToBacks.add(new BackToBackConflict(other, conf, false)); iNrInstructorBackToBackConflicts += conf.getNrInstructors(); if (conf.isDistanceBackToBackConflict()) iNrInstructorDistanceBackToBackConflicts += conf.getNrInstructors(); } } else if (conf.isMoreThanTwoADayConflict()) { TreeSet other = new TreeSet(); for (Iterator j=conf.getExams().iterator();j.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)j.next(); if (x.equals(exam)) continue; if (x.getAssignedPeriod()!=null) other.add(new ExamAssignment(x)); } if 
(other.size()<2) continue; if (conf.getNrStudents()>0) { iMoreThanTwoADays.add(new MoreThanTwoADayConflict(other, conf, true)); iNrMoreThanTwoADayConflicts += conf.getNrStudents(); } if (conf.getNrInstructors()>0) { iInstructorMoreThanTwoADays.add(new MoreThanTwoADayConflict(other, conf, false)); iNrInstructorMoreThanTwoADayConflicts += conf.getNrInstructors(); } } } } for (Iterator i=exam.getDistributionObjects().iterator();i.hasNext();) { DistributionObject dObj = (DistributionObject)i.next(); DistributionPref pref = dObj.getDistributionPref(); if (!check(pref, exam, getPeriod(), getRooms(), null)) iDistributions.add(new DistributionConflict(pref, exam)); } if (org.unitime.timetable.model.Exam.sExamTypeMidterm==exam.getExamType() && exam.getAssignedPeriod()!=null) { computeUnavailablility(exam, exam.getAssignedPeriod().getUniqueId()); for (Iterator i=exam.getInstructors().iterator();i.hasNext();) computeUnavailablility((DepartmentalInstructor)i.next(), exam.getAssignedPeriod()); } } private void computeUnavailablility(Exam exam, Vector<ExamResourceUnavailability> unavailabilities) { if (unavailabilities==null || unavailabilities.isEmpty()) return; for (ExamResourceUnavailability unavailability : unavailabilities) { Vector<Long> commonStudents = new Vector(); for (Enumeration e=exam.getStudents().elements();e.hasMoreElements();) { ExamStudent student = (ExamStudent)e.nextElement(); if (unavailability.getStudentIds().contains(student.getId())) commonStudents.add(student.getId()); } if (!commonStudents.isEmpty()) iDirects.add(new DirectConflict(unavailability, commonStudents)); Vector<Long> commonInstructors = new Vector(); for (Enumeration e=exam.getInstructors().elements();e.hasMoreElements();) { ExamInstructor instructor = (ExamInstructor)e.nextElement(); if (unavailability.getInstructorIds().contains(instructor.getId())) commonInstructors.add(instructor.getId()); } if (!commonInstructors.isEmpty()) iInstructorDirects.add(new DirectConflict(unavailability, 
commonInstructors)); } } /* private void computeUnavailablility(Hashtable<Assignment, Set<Long>> studentAssignments, ExamPeriod period) { for (Map.Entry<Assignment, Set<Long>> entry : studentAssignments.entrySet()) { if (!period.overlap(entry.getKey())) continue; iDirects.add(new DirectConflict(entry.getKey(), entry.getValue())); } } */ private void computeUnavailablility(org.unitime.timetable.model.Exam exam, Long periodId) { meetings: for (Map.Entry<Meeting, Set<Long>> entry : exam.getOverlappingStudentMeetings(periodId).entrySet()) { for (Iterator i=iDirects.iterator();i.hasNext();) { DirectConflict dc = (DirectConflict)i.next(); if (entry.getKey().getEvent().getUniqueId().equals(dc.getOtherEventId())) { dc.addMeeting(entry.getKey()); continue meetings; } } iDirects.add(new DirectConflict(entry.getKey(), entry.getValue())); } } private void computeUnavailablility(DepartmentalInstructor instructor, ExamPeriod period) { for (Iterator j=instructor.getClasses().iterator();j.hasNext();) { ClassInstructor ci = (ClassInstructor)j.next(); if (!ci.isLead()) continue; meetings: for (Iterator k=period.findOverlappingClassMeetings(ci.getClassInstructing().getUniqueId()).iterator();k.hasNext();) { Meeting meeting = (Meeting)k.next(); for (Iterator i=iInstructorDirects.iterator();i.hasNext();) { DirectConflict dc = (DirectConflict)i.next(); if (meeting.getEvent().getUniqueId().equals(dc.getOtherEventId())) { dc.incNrStudents(); dc.getStudents().add(instructor.getUniqueId()); dc.addMeeting(meeting); continue meetings; } } DirectConflict dc = new DirectConflict(meeting); dc.getStudents().add(instructor.getUniqueId()); iInstructorDirects.add(dc); } } } public boolean check(DistributionPref pref, org.unitime.timetable.model.Exam exam, ExamPeriod assignedPeriod, Collection<ExamRoomInfo> assignedRooms, Hashtable<Long,ExamAssignment> table) { if (PreferenceLevel.sNeutral.equals(pref.getPrefLevel().getPrefProlog())) return true; boolean positive = 
PreferenceLevel.sRequired.equals(pref.getPrefLevel().getPrefProlog()) || PreferenceLevel.sStronglyPreferred.equals(pref.getPrefLevel().getPrefProlog()) || PreferenceLevel.sPreferred.equals(pref.getPrefLevel().getPrefProlog()); if ("EX_SAME_PER".equals(pref.getDistributionType().getReference())) { if (positive) { //same period ExamPeriod period = null; for (Iterator i=pref.getDistributionObjects().iterator();i.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)((DistributionObject)i.next()).getPrefGroup(); ExamPeriod p = (x.equals(exam)?assignedPeriod:getAssignedPeriod(x,table)); if (p==null) continue; if (period==null) period = p; else if (!period.equals(p)) return false; } return true; } else { //different period HashSet periods = new HashSet(); for (Iterator i=pref.getDistributionObjects().iterator();i.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)((DistributionObject)i.next()).getPrefGroup(); ExamPeriod p = (x.equals(exam)?assignedPeriod:getAssignedPeriod(x,table)); if (p==null) continue; if (!periods.add(p)) return false; } return true; } } else if ("EX_PRECEDENCE".equals(pref.getDistributionType().getReference())) { TreeSet distObjects = new TreeSet( positive?new Comparator<DistributionObject>() { public int compare(DistributionObject d1, DistributionObject d2) { return d1.getSequenceNumber().compareTo(d2.getSequenceNumber()); } }:new Comparator<DistributionObject>() { public int compare(DistributionObject d1, DistributionObject d2) { return d2.getSequenceNumber().compareTo(d1.getSequenceNumber()); } }); distObjects.addAll(pref.getDistributionObjects()); ExamPeriod prev = null; for (Iterator i=distObjects.iterator();i.hasNext();) { org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)((DistributionObject)i.next()).getPrefGroup(); ExamPeriod p = (x.equals(exam)?assignedPeriod:getAssignedPeriod(x,table)); if (p==null) continue; if (prev!=null && prev.compareTo(p)>=0) 
// (continuation of check(): EX_PRECEDENCE branch — periods out of order violate the precedence)
return false;
prev = p;
}
return true;
} else if ("EX_SAME_ROOM".equals(pref.getDistributionType().getReference())) {
    if (positive) {
        // same room: each assigned exam's room set must contain, or be contained in,
        // the common room set seen so far
        Collection<ExamRoomInfo> rooms = null;
        for (Iterator i=pref.getDistributionObjects().iterator();i.hasNext();) {
            org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)((DistributionObject)i.next()).getPrefGroup();
            // use the candidate rooms for the exam being checked, the (possibly overridden) assignment otherwise
            Collection<ExamRoomInfo> r = (x.equals(exam)?assignedRooms:getAssignedRooms(x, table));
            if (r==null) continue; // unassigned exams are ignored
            if (rooms==null) rooms = r;
            else if (!rooms.containsAll(r) && !r.containsAll(rooms)) return false;
        }
        return true;
    } else {
        // different room: no room may be used by two exams of the constraint
        Collection<ExamRoomInfo> allRooms = new HashSet();
        for (Iterator i=pref.getDistributionObjects().iterator();i.hasNext();) {
            org.unitime.timetable.model.Exam x = (org.unitime.timetable.model.Exam)((DistributionObject)i.next()).getPrefGroup();
            Collection<ExamRoomInfo> r = (x.equals(exam)?assignedRooms:getAssignedRooms(x, table));
            if (r==null) continue; // unassigned exams are ignored
            for (ExamRoomInfo room : r) {
                if (!allRooms.add(room)) return false; // room already taken by another exam
            }
        }
        return true;
    }
}
// unknown / unsupported distribution type
return false;
}

/**
 * Period assigned to the given exam: taken from the given table of proposed
 * assignments when present, from the exam's committed assignment otherwise.
 */
public static ExamPeriod getAssignedPeriod(org.unitime.timetable.model.Exam exam, Hashtable<Long, ExamAssignment> table) {
    ExamAssignment assignment = (table==null?null:table.get(exam.getUniqueId()));
    return (assignment==null?exam.getAssignedPeriod():assignment.getPeriod());
}

/**
 * Rooms assigned to the given exam: taken from the given table of proposed
 * assignments when present, built from the exam's committed rooms otherwise.
 */
public static TreeSet<ExamRoomInfo> getAssignedRooms(org.unitime.timetable.model.Exam exam, Hashtable<Long, ExamAssignment> table) {
    ExamAssignment assignment = (table==null?null:table.get(exam.getUniqueId()));
    if (assignment!=null) return assignment.getRooms();
    TreeSet<ExamRoomInfo> rooms = new TreeSet();
    for (Iterator i=exam.getAssignedRooms().iterator();i.hasNext();) {
        Location location = (Location)i.next();
        rooms.add(new ExamRoomInfo(location,0)); // committed rooms carry no preference weight
    }
    return rooms;
}

/**
 * Assignment of the given exam: taken from the given table of proposed
 * assignments when present, wrapped from the committed assignment otherwise.
 */
public static ExamAssignment getAssignment(org.unitime.timetable.model.Exam exam, Hashtable<Long, ExamAssignment> table) {
    ExamAssignment assignment =
(table==null?null:table.get(exam.getUniqueId())); return (assignment==null?new ExamAssignment(exam):assignment); } public ExamAssignmentInfo(org.unitime.timetable.model.Exam exam, ExamPeriod period, Collection<ExamRoomInfo> rooms) throws Exception { this(exam, period, rooms, exam.getStudentExams(), null); } public ExamAssignmentInfo(org.unitime.timetable.model.Exam exam, ExamPeriod period, Collection<ExamRoomInfo> rooms, Hashtable<Long, ExamAssignment> table) throws Exception { this(exam, period, rooms, exam.getStudentExams(), table); } public ExamAssignmentInfo(org.unitime.timetable.model.Exam exam, ExamPeriod period, Collection<ExamRoomInfo> rooms, Hashtable<Long, Set<org.unitime.timetable.model.Exam>> examStudents, Hashtable<Long, ExamAssignment> table) throws Exception { super(exam, period, rooms); generateConflicts(exam, examStudents, table); } public ExamAssignmentInfo(org.unitime.timetable.model.Exam exam, Hashtable<Long, ExamAssignment> table) { super(exam); generateConflicts(exam, exam.getStudentExams(), table); } public void generateConflicts(org.unitime.timetable.model.Exam exam, Hashtable<Long, Set<org.unitime.timetable.model.Exam>> examStudents, Hashtable<Long, ExamAssignment> table) { if (getPeriod()==null) return; int btbDist = -1; boolean btbDayBreak = false; SolverParameterDef btbDistDef = SolverParameterDef.findByName("Exams.BackToBackDistance"); if (btbDistDef!=null && btbDistDef.getDefault()!=null) btbDist = Integer.parseInt(btbDistDef.getDefault()); SolverParameterDef btbDayBreakDef = SolverParameterDef.findByName("Exams.IsDayBreakBackToBack"); if (btbDayBreakDef!=null && btbDayBreakDef.getDefault()!=null) btbDayBreak = "true".equals(btbDayBreakDef.getDefault()); Hashtable<org.unitime.timetable.model.Exam,DirectConflict> directs = new Hashtable(); Hashtable<org.unitime.timetable.model.Exam,BackToBackConflict> backToBacks = new Hashtable(); Hashtable<String,MoreThanTwoADayConflict> m2ds = new Hashtable(); for 
(Entry<Long,Set<org.unitime.timetable.model.Exam>> studentExams : examStudents.entrySet()) { TreeSet sameDateExams = new TreeSet(); for (org.unitime.timetable.model.Exam other : studentExams.getValue()) { if (other.equals(getExam())) continue; ExamPeriod otherPeriod = getAssignedPeriod(other, table); if (otherPeriod==null) continue; if (getPeriod().equals(otherPeriod)) { //direct conflict DirectConflict dc = directs.get(other); if (dc==null) { dc = new DirectConflict(getAssignment(other, table)); directs.put(other, dc); } else dc.incNrStudents(); dc.getStudents().add(studentExams.getKey()); iNrDirectConflicts++; } else if (getPeriod().isBackToBack(otherPeriod,btbDayBreak)) { BackToBackConflict btb = backToBacks.get(other); double distance = Location.getDistance(getRooms(), getAssignedRooms(other, table)); if (btb==null) { btb = new BackToBackConflict(getAssignment(other, table), (btbDist<0?false:distance>btbDist), distance); backToBacks.put(other, btb); } else btb.incNrStudents(); btb.getStudents().add(studentExams.getKey()); iNrBackToBackConflicts++; if (btb.isDistance()) iNrDistanceBackToBackConflicts++; } if (getPeriod().getDateOffset().equals(otherPeriod.getDateOffset())) sameDateExams.add(other); } if (sameDateExams.size()>=2) { TreeSet examIds = new TreeSet(); TreeSet otherExams = new TreeSet(); for (Iterator j=sameDateExams.iterator();j.hasNext();) { org.unitime.timetable.model.Exam other = (org.unitime.timetable.model.Exam)j.next(); examIds.add(other.getUniqueId()); otherExams.add(getAssignment(other, table)); } MoreThanTwoADayConflict m2d = m2ds.get(examIds.toString()); if (m2d==null) { m2d = new MoreThanTwoADayConflict(otherExams); m2ds.put(examIds.toString(), m2d); } else m2d.incNrStudents(); iNrMoreThanTwoADayConflicts++; m2d.getStudents().add(studentExams.getKey()); } } iDirects.addAll(directs.values()); iBackToBacks.addAll(backToBacks.values()); iMoreThanTwoADays.addAll(m2ds.values()); if 
(org.unitime.timetable.model.Exam.sExamTypeMidterm==getExamType()) computeUnavailablility(exam,getPeriodId()); Hashtable<org.unitime.timetable.model.Exam,DirectConflict> idirects = new Hashtable(); Hashtable<org.unitime.timetable.model.Exam,BackToBackConflict> ibackToBacks = new Hashtable(); Hashtable<String,MoreThanTwoADayConflict> im2ds = new Hashtable(); for (Iterator i=getExam().getInstructors().iterator();i.hasNext();) { DepartmentalInstructor instructor = (DepartmentalInstructor)i.next(); TreeSet sameDateExams = new TreeSet(); for (Iterator j=instructor.getExams(getExam().getExamType()).iterator();j.hasNext();) { org.unitime.timetable.model.Exam other = (org.unitime.timetable.model.Exam)j.next(); if (other.equals(getExam())) continue; ExamPeriod otherPeriod = getAssignedPeriod(other, table); if (otherPeriod==null) continue; if (getPeriod().equals(otherPeriod)) { //direct conflict DirectConflict dc = idirects.get(other); if (dc==null) { dc = new DirectConflict(getAssignment(other, table)); idirects.put(other, dc); } else dc.incNrStudents(); iNrInstructorDirectConflicts++; dc.getStudents().add(instructor.getUniqueId()); } else if (getPeriod().isBackToBack(otherPeriod,btbDayBreak)) { BackToBackConflict btb = ibackToBacks.get(other); double distance = Location.getDistance(getRooms(), getAssignedRooms(other, table)); if (btb==null) { btb = new BackToBackConflict(getAssignment(other, table), (btbDist<0?false:distance>btbDist), distance); ibackToBacks.put(other, btb); } else btb.incNrStudents(); iNrInstructorBackToBackConflicts++; if (btb.isDistance()) iNrInstructorDistanceBackToBackConflicts++; btb.getStudents().add(instructor.getUniqueId()); } if (getPeriod().getDateOffset().equals(otherPeriod.getDateOffset())) sameDateExams.add(other); } if (org.unitime.timetable.model.Exam.sExamTypeMidterm==getExam().getExamType()) computeUnavailablility(instructor, getPeriod()); if (sameDateExams.size()>=2) { TreeSet examIds = new TreeSet(); TreeSet otherExams = new TreeSet(); 
// (continuation of generateConflicts(): instructor more-than-two-a-day collection)
for (Iterator j=sameDateExams.iterator();j.hasNext();) {
    org.unitime.timetable.model.Exam other = (org.unitime.timetable.model.Exam)j.next();
    examIds.add(other.getUniqueId());
    otherExams.add(getAssignment(other, table));
}
// conflicts are grouped by the set of other exam ids, so instructors sharing
// the same day pattern share one MoreThanTwoADayConflict record
MoreThanTwoADayConflict m2d = im2ds.get(examIds.toString());
if (m2d==null) {
    m2d = new MoreThanTwoADayConflict(otherExams);
    im2ds.put(examIds.toString(), m2d);
} else m2d.incNrStudents();
iNrInstructorMoreThanTwoADayConflicts++;
m2d.getStudents().add(instructor.getUniqueId());
}
}
iInstructorDirects.addAll(idirects.values());
iInstructorBackToBacks.addAll(ibackToBacks.values());
iInstructorMoreThanTwoADays.addAll(im2ds.values());
// distribution constraints: record a conflict for every violated distribution preference
for (Iterator i=getExam().getDistributionObjects().iterator();i.hasNext();) {
    DistributionObject dObj = (DistributionObject)i.next();
    DistributionPref pref = dObj.getDistributionPref();
    if (!check(pref, getExam(), getPeriod(), getRooms(), table))
        iDistributions.add(new DistributionConflict(pref, getExam()));
}
}

/** Student direct conflicts (another exam in the same period). */
public TreeSet<DirectConflict> getDirectConflicts() { return iDirects; }
/** Student back-to-back conflicts (exams in consecutive periods). */
public TreeSet<BackToBackConflict> getBackToBackConflicts() { return iBackToBacks; }
/** Student more-than-two-exams-a-day conflicts. */
public TreeSet<MoreThanTwoADayConflict> getMoreThanTwoADaysConflicts() { return iMoreThanTwoADays; }
/** Total number of students with a direct conflict (summed over conflicts). */
public int getNrDirectConflicts() {
    int ret = 0;
    for (Iterator i=iDirects.iterator();i.hasNext();) {
        DirectConflict dc = (DirectConflict)i.next();
        ret += dc.getNrStudents();
    }
    return ret;
}
/** Total number of students with a back-to-back conflict (summed over conflicts). */
public int getNrBackToBackConflicts() {
    int ret = 0;
    for (Iterator i=iBackToBacks.iterator();i.hasNext();) {
        BackToBackConflict btb = (BackToBackConflict)i.next();
        ret += btb.getNrStudents();
    }
    return ret;
}
/** Number of students whose back-to-back conflict exceeds the distance limit. */
public int getNrDistanceBackToBackConflicts() {
    int ret = 0;
    for (Iterator i=iBackToBacks.iterator();i.hasNext();) {
        BackToBackConflict btb = (BackToBackConflict)i.next();
        if (btb.isDistance()) ret += btb.getNrStudents();
    }
    return ret;
}
/** Total number of students with more than two exams on one day (summed over conflicts). */
public int getNrMoreThanTwoConflicts() {
    int ret = 0;
    for (Iterator i=iMoreThanTwoADays.iterator();i.hasNext();) {
// (continuation of getNrMoreThanTwoConflicts())
MoreThanTwoADayConflict m2d = (MoreThanTwoADayConflict)i.next();
ret += m2d.getNrStudents();
}
return ret;
}
/** Number of direct conflicts counted over students of the given section. */
public int getNrDirectConflicts(ExamSectionInfo section) {
    int ret = 0;
    for (Iterator i=iDirects.iterator();i.hasNext();) {
        DirectConflict dc = (DirectConflict)i.next();
        for (Enumeration f=dc.getStudents().elements();f.hasMoreElements();)
            if (section.getStudentIds().contains(f.nextElement())) ret++;
    }
    return ret;
}
/** Number of back-to-back conflicts counted over students of the given section. */
public int getNrBackToBackConflicts(ExamSectionInfo section) {
    int ret = 0;
    for (Iterator i=iBackToBacks.iterator();i.hasNext();) {
        BackToBackConflict btb = (BackToBackConflict)i.next();
        for (Enumeration f=btb.getStudents().elements();f.hasMoreElements();)
            if (section.getStudentIds().contains(f.nextElement())) ret++;
    }
    return ret;
}
/** Number of distance back-to-back conflicts counted over students of the given section. */
public int getNrDistanceBackToBackConflicts(ExamSectionInfo section) {
    int ret = 0;
    for (Iterator i=iBackToBacks.iterator();i.hasNext();) {
        BackToBackConflict btb = (BackToBackConflict)i.next();
        if (btb.isDistance())
            for (Enumeration f=btb.getStudents().elements();f.hasMoreElements();)
                if (section.getStudentIds().contains(f.nextElement())) ret++;
    }
    return ret;
}
/** Number of more-than-two-a-day conflicts counted over students of the given section. */
public int getNrMoreThanTwoConflicts(ExamSectionInfo section) {
    int ret = 0;
    for (Iterator i=iMoreThanTwoADays.iterator();i.hasNext();) {
        MoreThanTwoADayConflict m2d = (MoreThanTwoADayConflict)i.next();
        for (Enumeration f=m2d.getStudents().elements();f.hasMoreElements();)
            if (section.getStudentIds().contains(f.nextElement())) ret++;
    }
    return ret;
}
/** Violated distribution constraints of this exam. */
public TreeSet<DistributionConflict> getDistributionConflicts() { return iDistributions; }
/** HTML list of violated distribution constraint types, separated by the given delimiter. */
public String getDistributionConflictsHtml(String delim) {
    String ret = "";
    for (Iterator i=iDistributions.iterator();i.hasNext();) {
        DistributionConflict dc = (DistributionConflict)i.next();
        if (ret.length()>0) ret+=delim;
        ret+=dc.getTypeHtml();
    }
    return ret;
}
/** Plain-text list of violated distribution constraints (preference + type), separated by the given delimiter. */
public String getDistributionConflictsList(String delim) {
    String ret = "";
    for (Iterator i=iDistributions.iterator();i.hasNext();) {
        DistributionConflict dc =
// (continuation of getDistributionConflictsList())
(DistributionConflict)i.next();
if (ret.length()>0) ret+=delim;
ret+=PreferenceLevel.prolog2abbv(dc.getPreference())+" "+dc.getType();
}
return ret;
}
/** Number of violated distribution constraints. */
public int getNrDistributionConflicts() { return iDistributions.size(); }
/** True when this exam has at least one student conflict of any kind. */
public boolean getHasConflicts() {
    return !getDirectConflicts().isEmpty() || !getBackToBackConflicts().isEmpty() || !getMoreThanTwoADaysConflicts().isEmpty();
}
/** Student conflict table (HTML), including the header row. */
public String getConflictTable() {
    return getConflictTable(true);
}
/**
 * Student conflict table (HTML): direct, then more-than-two-a-day, then back-to-back conflicts.
 * @param header when true, a header row (Students, Conflict, Exam, Period, Room) is included
 */
public String getConflictTable(boolean header) {
    String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>";
    if (header) {
        ret += "<tr>";
        ret += "<td><i>Students</i></td>";
        ret += "<td><i>Conflict</i></td>";
        ret += "<td><i>Exam</i></td>";
        ret += "<td><i>Period</i></td>";
        ret += "<td><i>Room</i></td>";
        ret += "</tr>";
    }
    for (Iterator i=getDirectConflicts().iterator();i.hasNext();) ret += i.next().toString();
    for (Iterator i=getMoreThanTwoADaysConflicts().iterator();i.hasNext();) ret += i.next().toString();
    for (Iterator i=getBackToBackConflicts().iterator();i.hasNext();) ret += i.next().toString();
    ret += "</table>";
    return ret;
}
/** Student conflict table (HTML) rendered with toString(true), i.e., rows link to the conflicting exams. */
public String getConflictInfoTable() {
    String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>";
    ret += "<tr>";
    ret += "<td><i>Students</i></td>";
    ret += "<td><i>Conflict</i></td>";
    ret += "<td><i>Exam</i></td>";
    ret += "<td><i>Period</i></td>";
    ret += "<td><i>Room</i></td>";
    ret += "</tr>";
    for (DirectConflict dc : getDirectConflicts()) ret += dc.toString(true);
    for (MoreThanTwoADayConflict m2d : getMoreThanTwoADaysConflicts()) ret += m2d.toString(true);
    for (BackToBackConflict btb : getBackToBackConflicts()) ret += btb.toString(true);
    ret += "</table>";
    return ret;
}
/** Distribution conflict table (HTML), including the header row. */
public String getDistributionConflictTable() {
    return getDistributionConflictTable(true);
}
/**
 * Distribution conflict table (HTML).
 * @param header when true, a header row (Preference, Distribution, Exam, Period, Room) is included
 */
public String getDistributionConflictTable(boolean header) {
    String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>";
    if (header) {
        ret += "<tr>";
        ret +=
// (continuation of getDistributionConflictTable(boolean): header row)
"<td><i>Preference</i></td>";
ret += "<td><i>Distribution</i></td>";
ret += "<td><i>Exam</i></td>";
ret += "<td><i>Period</i></td>";
ret += "<td><i>Room</i></td>";
ret += "</tr>";
}
for (Iterator i=getDistributionConflicts().iterator();i.hasNext();) ret += i.next().toString();
ret += "</table>";
return ret;
}
/** Distribution conflict table (HTML) rendered with toString(true), i.e., rows link to the conflicting exams. */
public String getDistributionInfoConflictTable() {
    String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>";
    ret += "<tr>";
    ret += "<td><i>Preference</i></td>";
    ret += "<td><i>Distribution</i></td>";
    ret += "<td><i>Exam</i></td>";
    ret += "<td><i>Period</i></td>";
    ret += "<td><i>Room</i></td>";
    ret += "</tr>";
    for (DistributionConflict dc : getDistributionConflicts()) ret += dc.toString(true);
    ret += "</table>";
    return ret;
}
/** Instructor direct conflicts (another exam in the same period). */
public TreeSet<DirectConflict> getInstructorDirectConflicts() { return iInstructorDirects; }
/** Instructor back-to-back conflicts (exams in consecutive periods). */
public TreeSet<BackToBackConflict> getInstructorBackToBackConflicts() { return iInstructorBackToBacks; }
/** Instructor more-than-two-exams-a-day conflicts. */
public TreeSet<MoreThanTwoADayConflict> getInstructorMoreThanTwoADaysConflicts() { return iInstructorMoreThanTwoADays; }
/** Total number of instructors with a direct conflict (summed over conflicts; the students vector holds instructor ids here). */
public int getNrInstructorDirectConflicts() {
    int ret = 0;
    for (Iterator i=iInstructorDirects.iterator();i.hasNext();) {
        DirectConflict dc = (DirectConflict)i.next();
        ret += dc.getNrStudents();
    }
    return ret;
}
/** Total number of instructors with a back-to-back conflict (summed over conflicts). */
public int getNrInstructorBackToBackConflicts() {
    int ret = 0;
    for (Iterator i=iInstructorBackToBacks.iterator();i.hasNext();) {
        BackToBackConflict btb = (BackToBackConflict)i.next();
        ret += btb.getNrStudents();
    }
    return ret;
}
/** Number of instructors whose back-to-back conflict exceeds the distance limit. */
public int getNrInstructorDistanceBackToBackConflicts() {
    int ret = 0;
    for (Iterator i=iInstructorBackToBacks.iterator();i.hasNext();) {
        BackToBackConflict btb = (BackToBackConflict)i.next();
        if (btb.isDistance()) ret += btb.getNrStudents();
    }
    return ret;
}
/** Total number of instructors with more than two exams on one day (summed over conflicts). */
public int getNrInstructorMoreThanTwoConflicts() {
    int ret = 0;
    for (Iterator i=iInstructorMoreThanTwoADays.iterator();i.hasNext();) {
        MoreThanTwoADayConflict m2d = (MoreThanTwoADayConflict)i.next();
        ret +=
// (continuation of getNrInstructorMoreThanTwoConflicts())
m2d.getNrStudents();
}
return ret;
}
/** Number of direct conflicts counted over instructors of the given section. */
public int getNrInstructorDirectConflicts(ExamSectionInfo section) {
    int ret = 0;
    for (Iterator i=iInstructorDirects.iterator();i.hasNext();) {
        DirectConflict dc = (DirectConflict)i.next();
        for (Enumeration f=dc.getStudents().elements();f.hasMoreElements();)
            if (section.getStudentIds().contains(f.nextElement())) ret++;
    }
    return ret;
}
/** Number of back-to-back conflicts counted over instructors of the given section. */
public int getNrInstructorBackToBackConflicts(ExamSectionInfo section) {
    int ret = 0;
    for (Iterator i=iInstructorBackToBacks.iterator();i.hasNext();) {
        BackToBackConflict btb = (BackToBackConflict)i.next();
        for (Enumeration f=btb.getStudents().elements();f.hasMoreElements();)
            if (section.getStudentIds().contains(f.nextElement())) ret++;
    }
    return ret;
}
/** Number of distance back-to-back conflicts counted over instructors of the given section. */
public int getNrInstructorDistanceBackToBackConflicts(ExamSectionInfo section) {
    int ret = 0;
    for (Iterator i=iInstructorBackToBacks.iterator();i.hasNext();) {
        BackToBackConflict btb = (BackToBackConflict)i.next();
        if (btb.isDistance())
            for (Enumeration f=btb.getStudents().elements();f.hasMoreElements();)
                if (section.getStudentIds().contains(f.nextElement())) ret++;
    }
    return ret;
}
/** Number of more-than-two-a-day conflicts counted over instructors of the given section. */
public int getNrInstructorMoreThanTwoConflicts(ExamSectionInfo section) {
    int ret = 0;
    for (Iterator i=iInstructorMoreThanTwoADays.iterator();i.hasNext();) {
        MoreThanTwoADayConflict m2d = (MoreThanTwoADayConflict)i.next();
        for (Enumeration f=m2d.getStudents().elements();f.hasMoreElements();)
            if (section.getStudentIds().contains(f.nextElement())) ret++;
    }
    return ret;
}
/** True when this exam has at least one instructor conflict of any kind. */
public boolean getHasInstructorConflicts() {
    return !getInstructorDirectConflicts().isEmpty() || !getInstructorBackToBackConflicts().isEmpty() || !getInstructorMoreThanTwoADaysConflicts().isEmpty();
}
/** Instructor conflict table (HTML), including the header row. */
public String getInstructorConflictTable() {
    return getInstructorConflictTable(true);
}
/**
 * Instructor conflict table (HTML): direct, then more-than-two-a-day, then back-to-back conflicts.
 * @param header when true, a header row (Instructors, Conflict, Exam, Period, Room) is included
 */
public String getInstructorConflictTable(boolean header) {
    String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>";
    if (header) {
        ret += "<tr>";
        ret += "<td><i>Instructors</i></td>";
        ret +=
"<td><i>Conflict</i></td>"; ret += "<td><i>Exam</i></td>"; ret += "<td><i>Period</i></td>"; ret += "<td><i>Room</i></td>"; ret += "</tr>"; } for (Iterator i=getInstructorDirectConflicts().iterator();i.hasNext();) ret += i.next().toString(); for (Iterator i=getInstructorMoreThanTwoADaysConflicts().iterator();i.hasNext();) ret += i.next().toString(); for (Iterator i=getInstructorBackToBackConflicts().iterator();i.hasNext();) ret += i.next().toString(); ret += "</table>"; return ret; } public String getInstructorConflictInfoTable() { String ret = "<table border='0' width='95%' cellspacing='0' cellpadding='3'>"; ret += "<tr>"; ret += "<td><i>Students</i></td>"; ret += "<td><i>Conflict</i></td>"; ret += "<td><i>Exam</i></td>"; ret += "<td><i>Period</i></td>"; ret += "<td><i>Room</i></td>"; ret += "</tr>"; for (DirectConflict dc : getInstructorDirectConflicts()) ret += dc.toString(true); for (MoreThanTwoADayConflict m2d : getInstructorMoreThanTwoADaysConflicts()) ret += m2d.toString(true); for (BackToBackConflict btb : getInstructorBackToBackConflicts()) ret += btb.toString(true); ret += "</table>"; return ret; } public static class DirectConflict implements Serializable, Comparable<DirectConflict> { protected ExamAssignment iOtherExam = null; protected int iNrStudents = 1; protected Vector<Long> iStudents = new Vector(); protected String iOtherEventName = null; protected String iOtherEventTime = null; protected String iOtherEventDate = null; protected String iOtherEventRoom = null; protected int iOtherEventSize = 0; protected Long iOtherEventId; protected transient ClassEvent iOtherEvent = null; protected DirectConflict(ExamAssignment otherExam) { iOtherExam = otherExam; } protected DirectConflict(ExamAssignment otherExam, ExamConflict conflict, boolean students) { iOtherExam = otherExam; if (students) { iNrStudents = conflict.getStudents().size(); for (Iterator i=conflict.getStudents().iterator();i.hasNext();) { Student student = (Student)i.next(); 
iStudents.add(student.getUniqueId()); } } else { iNrStudents = conflict.getInstructors().size(); for (Iterator i=conflict.getInstructors().iterator();i.hasNext();) { DepartmentalInstructor instructor = (DepartmentalInstructor)i.next(); iStudents.add(instructor.getUniqueId()); } } } protected DirectConflict(Meeting otherMeeting) { try { iOtherEvent = (ClassEvent)otherMeeting.getEvent(); } catch (ClassCastException e) {} iOtherEventSize = otherMeeting.getEvent().getMaxCapacity(); iOtherEventId = otherMeeting.getEvent().getUniqueId(); iOtherEventName = otherMeeting.getEvent().getEventName(); iOtherEventDate = otherMeeting.dateStr(); iOtherEventTime = otherMeeting.startTime()+" - "+otherMeeting.stopTime(); iOtherEventRoom = otherMeeting.getRoomLabel(); } protected void addMeeting(Meeting otherMeeting) { if (otherMeeting.getLocation()!=null) iOtherEventRoom += (iOtherEventRoom!=null && iOtherEventRoom.length()>0?", ":"")+otherMeeting.getRoomLabel(); } protected DirectConflict(Meeting otherMeeting,Collection<Long> studentIds) { this(otherMeeting); iNrStudents = studentIds.size(); iStudents.addAll(studentIds); } protected DirectConflict(ExamResourceUnavailability unavailability, Vector<Long> studentIds) { iOtherEventId = unavailability.getId(); iOtherEventSize = unavailability.getSize(); iOtherEventName = unavailability.getName(); iOtherEventTime = unavailability.getTime(); iOtherEventDate = unavailability.getDate(); iOtherEventRoom = unavailability.getRoom(); iNrStudents = studentIds.size(); iStudents = studentIds; } protected void incNrStudents() { iNrStudents++; } public int getNrStudents() { return iNrStudents; } public Vector<Long> getStudents() { return iStudents; } public ExamAssignment getOtherExam() { return iOtherExam; } public Long getOtherEventId() { return iOtherEventId; } public ClassEvent getOtherEvent() { if (iOtherEvent!=null) return iOtherEvent; if (iOtherEventId==null) return null; iOtherEvent = new ClassEventDAO().get(iOtherEventId); return 
iOtherEvent; } public String getOtherEventName() { return iOtherEventName; } public String getOtherEventRoom() { return iOtherEventRoom; } public String getOtherEventDate() { return iOtherEventDate; } public String getOtherEventTime() { return iOtherEventTime; } public int getOtherEventSize() { return iOtherEventSize; } public Class_ getOtherClass() { return (getOtherEvent()==null?null:getOtherEvent().getClazz()); } public int compareTo(DirectConflict c) { int cmp = -Double.compare(getNrStudents(), c.getNrStudents()); if (cmp!=0) return cmp; if (getOtherExam()==null) return (c.getOtherExam()==null?0:-1); if (c.getOtherExam()==null) return 1; return getOtherExam().compareTo(c.getOtherExam()); } public String toString() { return toString(false); } public String toString(boolean links) { String ret = ""; if (links && getOtherExam()!=null) ret += "<tr onmouseover=\"this.style.backgroundColor='rgb(223,231,242)';this.style.cursor='hand';this.style.cursor='pointer';\" onmouseout=\"this.style.backgroundColor='transparent';\" onclick=\"document.location='examInfo.do?examId="+getOtherExam().getExamId()+"&op=Select';\">"; else ret += "<tr onmouseover=\"this.style.backgroundColor='rgb(223,231,242)';\" onmouseout=\"this.style.backgroundColor='transparent';\">"; ret += "<td style='font-weight:bold;color:"+PreferenceLevel.prolog2color("P")+";'>"; ret += String.valueOf(getNrStudents()); ret += "</td>"; ret += "<td style='font-weight:bold;color:"+PreferenceLevel.prolog2color("P")+";'>"; ret += "Direct"; ret += "</td>"; if (getOtherExam()==null) { if (iOtherEventName!=null) { ret += "<td>"+iOtherEventName+"</td>"; ret += "<td>"+iOtherEventDate+" "+iOtherEventTime+"</td>"; ret += "<td>"+iOtherEventRoom+"</td>"; } else { ret += "<td colspan='3'>Student/instructor not available for unknown reason.</td>"; } } else { ret += "<td>"+getOtherExam().getExamNameHtml()+"</td>"; ret += "<td>"+getOtherExam().getPeriodAbbreviationWithPref()+"</td>"; ret += 
"<td>"+getOtherExam().getRoomsNameWithPref(", ")+"</td>"; } ret += "</tr>"; return ret; } } public static class BackToBackConflict implements Serializable, Comparable<BackToBackConflict> { protected ExamAssignment iOtherExam; protected int iNrStudents = 1; protected boolean iIsDistance = false; protected Vector<Long> iStudents = new Vector(); protected double iDistance = 0; protected BackToBackConflict(ExamAssignment otherExam, boolean isDistance, double distance) { iOtherExam = otherExam; iIsDistance = isDistance; iDistance = distance; } protected BackToBackConflict(ExamAssignment otherExam, ExamConflict conflict, boolean students) { iOtherExam = otherExam; if (students) { iNrStudents = conflict.getStudents().size(); for (Iterator i=conflict.getStudents().iterator();i.hasNext();) { Student student = (Student)i.next(); iStudents.add(student.getUniqueId()); } } else { iNrStudents = conflict.getInstructors().size(); for (Iterator i=conflict.getInstructors().iterator();i.hasNext();) { DepartmentalInstructor instructor = (DepartmentalInstructor)i.next(); iStudents.add(instructor.getUniqueId()); } } iIsDistance = conflict.isDistanceBackToBackConflict(); iDistance = conflict.getDistance(); } protected void incNrStudents() { iNrStudents++; } public int getNrStudents() { return iNrStudents; } public boolean isDistance() { return iIsDistance; } public ExamAssignment getOtherExam() { return iOtherExam; } public Vector<Long> getStudents() { return iStudents; } public double getDistance() { return iDistance; } public int compareTo(BackToBackConflict c) { int cmp = -Double.compare(getNrStudents(), c.getNrStudents()); if (cmp!=0) return cmp; if (isDistance()!=c.isDistance()) return (isDistance()?-1:1); return getOtherExam().compareTo(c.getOtherExam()); } public String toString() { return toString(false); } public String toString(boolean links) { String ret = ""; if (links && getOtherExam()!=null) ret += "<tr 
onmouseover=\"this.style.backgroundColor='rgb(223,231,242)';this.style.cursor='hand';this.style.cursor='pointer';\" onmouseout=\"this.style.backgroundColor='transparent';\" onclick=\"document.location='examInfo.do?examId="+getOtherExam().getExamId()+"&op=Select';\">"; else ret += "<tr onmouseover=\"this.style.backgroundColor='rgb(223,231,242)';\" onmouseout=\"this.style.backgroundColor='transparent';\">"; ret += "<td style='font-weight:bold;color:"+PreferenceLevel.prolog2color("1")+";'>"; ret += String.valueOf(getNrStudents()); ret += "</td>"; ret += "<td style='font-weight:bold;color:"+PreferenceLevel.prolog2color("1")+";'>"; ret += "Back-To-Back"; if (isDistance()) ret+="<br>("+Math.round(10.0*getDistance())+" m)"; ret += "</td>"; ret += "<td>"+getOtherExam().getExamNameHtml()+"</td>"; ret += "<td>"+getOtherExam().getPeriodAbbreviationWithPref()+"</td>"; ret += "<td>"+getOtherExam().getRoomsNameWithPref(", ")+"</td>"; ret += "</tr>"; return ret; } } public static class MoreThanTwoADayConflict implements Serializable, Comparable<MoreThanTwoADayConflict> { protected TreeSet<ExamAssignment> iOtherExams; protected int iNrStudents = 1; protected Vector<Long> iStudents = new Vector(); protected MoreThanTwoADayConflict(TreeSet<ExamAssignment> otherExams) { iOtherExams = otherExams; } protected MoreThanTwoADayConflict(TreeSet<ExamAssignment> otherExams, ExamConflict conflict, boolean students) { iOtherExams = otherExams; if (students) { iNrStudents = conflict.getStudents().size(); for (Iterator i=conflict.getStudents().iterator();i.hasNext();) { Student student = (Student)i.next(); iStudents.add(student.getUniqueId()); } } else { iNrStudents = conflict.getInstructors().size(); for (Iterator i=conflict.getInstructors().iterator();i.hasNext();) { DepartmentalInstructor instructor = (DepartmentalInstructor)i.next(); iStudents.add(instructor.getUniqueId()); } } } protected void incNrStudents() { iNrStudents++; } public int getNrStudents() { return iNrStudents; } public 
Vector<Long> getStudents() { return iStudents; } public TreeSet<ExamAssignment> getOtherExams() { return iOtherExams; } public int compareTo(MoreThanTwoADayConflict c) { int cmp = -Double.compare(getNrStudents(), c.getNrStudents()); if (cmp!=0) return cmp; cmp = -Double.compare(getOtherExams().size(), c.getOtherExams().size()); if (cmp!=0) return cmp; Iterator i1 = getOtherExams().iterator(), i2 = c.getOtherExams().iterator(); while (i1.hasNext()) { ExamAssignment a1 = (ExamAssignment)i1.next(); ExamAssignment a2 = (ExamAssignment)i2.next(); if (!a1.equals(a2)) return a1.compareTo(a2); } return 0; } public String toString() { return toString(false); } public String toString(boolean links) { String ret = ""; String mouseOver = ""; String mouseOut = ""; String id = ""; for (Iterator i=getOtherExams().iterator();i.hasNext();) { ExamAssignment a = (ExamAssignment)i.next(); id+=a.getExamId(); if (i.hasNext()) id+=":"; } int idx = 0; Vector<Long> ids = new Vector(); for (Iterator i=getOtherExams().iterator();i.hasNext();idx++) { ExamAssignment a = (ExamAssignment)i.next(); ids.add(a.getExamId()); mouseOver += "document.getElementById('"+id+":"+idx+"').style.backgroundColor='rgb(223,231,242)';"; if (links) mouseOver += "this.style.cursor='hand';this.style.cursor='pointer';"; mouseOut += "document.getElementById('"+id+":"+idx+"').style.backgroundColor='transparent';"; } idx = 0; if (links) ret += "<tr id='"+id+":"+idx+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\" onclick=\"document.location='examInfo.do?examId="+ids.elementAt(idx)+"&op=Select';\">"; else ret += "<tr id='"+id+":"+idx+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\">"; ret += "<td valign='top' rowspan='"+getOtherExams().size()+"' style='font-weight:bold;color:"+PreferenceLevel.prolog2color("2")+";'>"; ret += String.valueOf(getNrStudents()); ret += "</td>"; ret += "<td valign='top' rowspan='"+getOtherExams().size()+"' 
style='font-weight:bold;color:"+PreferenceLevel.prolog2color("2")+";'>"; ret += "&gt;2 A Day"; ret += "</td>"; for (Iterator i=getOtherExams().iterator();i.hasNext();idx++) { ExamAssignment a = (ExamAssignment)i.next(); ret += "<td>"+a.getExamNameHtml()+"</td>"; ret += "<td>"+a.getPeriodAbbreviationWithPref()+"</td>"; ret += "<td>"+a.getRoomsNameWithPref(", ")+"</td>"; ret += "</tr>"; if (i.hasNext()) { if (links) ret += "<tr id='"+id+":"+(1+idx)+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\" onclick=\"document.location='examInfo.do?examId="+ids.elementAt(1+idx)+"&op=Select';\">"; else ret += "<tr id='"+id+":"+(1+idx)+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\">"; } } return ret; } } public static class DistributionConflict implements Serializable, Comparable<DistributionConflict> { protected TreeSet<ExamInfo> iOtherExams; protected String iPreference; protected Long iId; protected String iType; protected transient DistributionPref iPref = null; protected DistributionConflict(Long id, String type, TreeSet<ExamInfo> otherExams, String preference) { iId = id; iType = type; iOtherExams = otherExams; iPreference = preference; } protected DistributionConflict(ExamDistributionConstraint dc, Exam exclude) { iId = dc.getId(); iType = dc.getTypeString(); iOtherExams = new TreeSet(); for (Enumeration e=dc.variables().elements();e.hasMoreElements();) { Exam exam = (Exam)e.nextElement(); if (exam.equals(exclude)) continue; iOtherExams.add(exam.getAssignment()==null?new ExamInfo(exam):new ExamAssignment(exam,(ExamPlacement)exam.getAssignment())); } iPreference = (dc.isHard()?"R":dc.getWeight()>=2?"-2":"-1"); } protected DistributionConflict(DistributionPref pref, org.unitime.timetable.model.Exam exclude) { iPref = pref; iId = pref.getUniqueId(); iType = pref.getDistributionType().getLabel(); iOtherExams = new TreeSet(); for (Iterator i=pref.getDistributionObjects().iterator();i.hasNext();) { DistributionObject dObj = (DistributionObject)i.next(); 
org.unitime.timetable.model.Exam exam = (org.unitime.timetable.model.Exam)dObj.getPrefGroup(); if (exam.equals(exclude)) continue; iOtherExams.add(exam.getAssignedPeriod()==null?new ExamInfo(exam):new ExamAssignment(exam)); } iPreference = pref.getPrefLevel().getPrefProlog(); } public Long getId() { return iId; } public String getType() { return iType; } public String getTypeHtml() { String title = PreferenceLevel.prolog2string(getPreference())+" "+getType()+" with "; for (Iterator i=getOtherExams().iterator();i.hasNext();) { ExamInfo a = (ExamInfo)i.next(); title += a.getExamName(); if (i.hasNext()) title += " and "; } return "<span style='font-weight:bold;color:"+PreferenceLevel.prolog2color(getPreference())+";' title='"+title+"'>"+iType+"</span>"; } public String getPreference() { return iPreference; } public TreeSet<ExamInfo> getOtherExams() { return iOtherExams; } public int hashCode() { return getId().hashCode(); } public boolean equals(Object o) { if (o==null || !(o instanceof DistributionConflict)) return false; DistributionConflict c = (DistributionConflict)o; return getId().equals(c.getId()); } public int compareTo(DistributionConflict c) { Iterator i1 = getOtherExams().iterator(), i2 = c.getOtherExams().iterator(); while (i1.hasNext()) { ExamInfo a1 = (ExamInfo)i1.next(); ExamInfo a2 = (ExamInfo)i2.next(); if (!a1.equals(a2)) return a1.compareTo(a2); } return getId().compareTo(c.getId()); } public String toString() { return toString(false); } public String toString(boolean links) { String ret = ""; String mouseOver = ""; String mouseOut = ""; String id = ""; for (Iterator i=getOtherExams().iterator();i.hasNext();) { ExamInfo a = (ExamInfo)i.next(); id+=a.getExamId(); if (i.hasNext()) id+=":"; } int idx = 0; Vector<Long> ids = new Vector(); for (Iterator i=getOtherExams().iterator();i.hasNext();idx++) { ExamInfo a = (ExamInfo)i.next(); ids.add(a.getExamId()); mouseOver += 
"document.getElementById('"+id+":"+idx+"').style.backgroundColor='rgb(223,231,242)';"; if (links) mouseOver += "this.style.cursor='hand';this.style.cursor='pointer';"; mouseOut += "document.getElementById('"+id+":"+idx+"').style.backgroundColor='transparent';"; } idx = 0; if (links) ret += "<tr id='"+id+":"+idx+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\" onclick=\"document.location='examInfo.do?examId="+ids.elementAt(idx)+"&op=Select';\">"; else ret += "<tr id='"+id+":"+idx+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\">"; ret += "<td valign='top' rowspan='"+getOtherExams().size()+"' style='font-weight:bold;color:"+PreferenceLevel.prolog2color(getPreference())+";'>"; ret += PreferenceLevel.prolog2string(getPreference()); ret += "</td>"; ret += "<td valign='top' rowspan='"+getOtherExams().size()+"' style='font-weight:bold;color:"+PreferenceLevel.prolog2color(getPreference())+";'>"; ret += getType(); ret += "</td>"; for (Iterator i=getOtherExams().iterator();i.hasNext();idx++) { ExamInfo a = (ExamInfo)i.next(); ret += "<td>"+a.getExamNameHtml()+"</td>"; if (a instanceof ExamAssignment) { ExamAssignment ea = (ExamAssignment)a; ret += "<td>"+ea.getPeriodAbbreviationWithPref()+"</td>"; ret += "<td>"+ea.getRoomsNameWithPref(", ")+"</td>"; } else { ret += "<td></td>"; ret += "<td></td>"; } ret += "</tr>"; if (i.hasNext()) { if (links) ret += "<tr id='"+id+":"+(1+idx)+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\" onclick=\"document.location='examInfo.do?examId="+ids.elementAt(1+idx)+"&op=Select';\">"; else ret += "<tr id='"+id+":"+(1+idx)+"' onmouseover=\""+mouseOver+"\" onmouseout=\""+mouseOut+"\">"; } } return ret; } } }
computation of instructor conflicts on instructional offering components corrected
JavaSource/org/unitime/timetable/solver/exam/ui/ExamAssignmentInfo.java
computation of instructor conflicts on instructional offering components corrected
<ide><path>avaSource/org/unitime/timetable/solver/exam/ui/ExamAssignmentInfo.java <ide> for (Iterator i=iInstructorDirects.iterator();i.hasNext();) { <ide> DirectConflict dc = (DirectConflict)i.next(); <ide> for (Enumeration f=dc.getStudents().elements();f.hasMoreElements();) <del> if (section.getStudentIds().contains(f.nextElement())) ret++; <add> if (dc.getOtherEventId()!=null) { <add> if (section.getStudentIds().contains(f.nextElement())) ret++; <add> } else ret++; <ide> } <ide> return ret; <ide> } <ide> <ide> public int getNrInstructorBackToBackConflicts(ExamSectionInfo section) { <del> int ret = 0; <del> for (Iterator i=iInstructorBackToBacks.iterator();i.hasNext();) { <del> BackToBackConflict btb = (BackToBackConflict)i.next(); <del> for (Enumeration f=btb.getStudents().elements();f.hasMoreElements();) <del> if (section.getStudentIds().contains(f.nextElement())) ret++; <del> } <del> return ret; <add> return getNrInstructorBackToBackConflicts(); <ide> } <ide> <ide> public int getNrInstructorDistanceBackToBackConflicts(ExamSectionInfo section) { <del> int ret = 0; <del> for (Iterator i=iInstructorBackToBacks.iterator();i.hasNext();) { <del> BackToBackConflict btb = (BackToBackConflict)i.next(); <del> if (btb.isDistance()) <del> for (Enumeration f=btb.getStudents().elements();f.hasMoreElements();) <del> if (section.getStudentIds().contains(f.nextElement())) ret++; <del> } <del> return ret; <add> return getNrInstructorDistanceBackToBackConflicts(); <ide> } <ide> <ide> public int getNrInstructorMoreThanTwoConflicts(ExamSectionInfo section) { <del> int ret = 0; <del> for (Iterator i=iInstructorMoreThanTwoADays.iterator();i.hasNext();) { <del> MoreThanTwoADayConflict m2d = (MoreThanTwoADayConflict)i.next(); <del> for (Enumeration f=m2d.getStudents().elements();f.hasMoreElements();) <del> if (section.getStudentIds().contains(f.nextElement())) ret++; <del> } <del> return ret; <add> return getNrInstructorMoreThanTwoConflicts(); <ide> } <ide> <ide> public boolean 
getHasInstructorConflicts() {
Java
apache-2.0
4e2b8bc7f6ee62c0cc958f38935f520e567d57f6
0
vivekmore/generator-jhipster,stevehouel/generator-jhipster,rifatdover/generator-jhipster,ziogiugno/generator-jhipster,hdurix/generator-jhipster,jhipster/generator-jhipster,duderoot/generator-jhipster,rkohel/generator-jhipster,vivekmore/generator-jhipster,ctamisier/generator-jhipster,danielpetisme/generator-jhipster,gzsombor/generator-jhipster,cbornet/generator-jhipster,atomfrede/generator-jhipster,mosoft521/generator-jhipster,ruddell/generator-jhipster,dynamicguy/generator-jhipster,PierreBesson/generator-jhipster,ziogiugno/generator-jhipster,mraible/generator-jhipster,pascalgrimaud/generator-jhipster,Tcharl/generator-jhipster,jkutner/generator-jhipster,sendilkumarn/generator-jhipster,sohibegit/generator-jhipster,hdurix/generator-jhipster,siliconharborlabs/generator-jhipster,yongli82/generator-jhipster,Tcharl/generator-jhipster,PierreBesson/generator-jhipster,stevehouel/generator-jhipster,deepu105/generator-jhipster,danielpetisme/generator-jhipster,mraible/generator-jhipster,robertmilowski/generator-jhipster,dynamicguy/generator-jhipster,danielpetisme/generator-jhipster,JulienMrgrd/generator-jhipster,sohibegit/generator-jhipster,ctamisier/generator-jhipster,eosimosu/generator-jhipster,pascalgrimaud/generator-jhipster,jhipster/generator-jhipster,ruddell/generator-jhipster,eosimosu/generator-jhipster,Tcharl/generator-jhipster,dalbelap/generator-jhipster,lrkwz/generator-jhipster,baskeboler/generator-jhipster,liseri/generator-jhipster,mraible/generator-jhipster,maniacneron/generator-jhipster,hdurix/generator-jhipster,mosoft521/generator-jhipster,rifatdover/generator-jhipster,JulienMrgrd/generator-jhipster,jhipster/generator-jhipster,ctamisier/generator-jhipster,ruddell/generator-jhipster,rkohel/generator-jhipster,gmarziou/generator-jhipster,wmarques/generator-jhipster,duderoot/generator-jhipster,gmarziou/generator-jhipster,siliconharborlabs/generator-jhipster,gmarziou/generator-jhipster,maniacneron/generator-jhipster,yongli82/generator-jhipster,dynamicguy/generator-jhips
ter,vivekmore/generator-jhipster,sohibegit/generator-jhipster,wmarques/generator-jhipster,lrkwz/generator-jhipster,ramzimaalej/generator-jhipster,JulienMrgrd/generator-jhipster,jhipster/generator-jhipster,gmarziou/generator-jhipster,deepu105/generator-jhipster,vivekmore/generator-jhipster,robertmilowski/generator-jhipster,rkohel/generator-jhipster,stevehouel/generator-jhipster,vivekmore/generator-jhipster,duderoot/generator-jhipster,ctamisier/generator-jhipster,dynamicguy/generator-jhipster,erikkemperman/generator-jhipster,gzsombor/generator-jhipster,sendilkumarn/generator-jhipster,baskeboler/generator-jhipster,atomfrede/generator-jhipster,mraible/generator-jhipster,atomfrede/generator-jhipster,nkolosnjaji/generator-jhipster,wmarques/generator-jhipster,dalbelap/generator-jhipster,liseri/generator-jhipster,pascalgrimaud/generator-jhipster,xetys/generator-jhipster,robertmilowski/generator-jhipster,maniacneron/generator-jhipster,cbornet/generator-jhipster,Tcharl/generator-jhipster,baskeboler/generator-jhipster,maniacneron/generator-jhipster,liseri/generator-jhipster,Tcharl/generator-jhipster,dimeros/generator-jhipster,atomfrede/generator-jhipster,ziogiugno/generator-jhipster,liseri/generator-jhipster,erikkemperman/generator-jhipster,rkohel/generator-jhipster,ramzimaalej/generator-jhipster,sohibegit/generator-jhipster,dimeros/generator-jhipster,gmarziou/generator-jhipster,jkutner/generator-jhipster,danielpetisme/generator-jhipster,dimeros/generator-jhipster,JulienMrgrd/generator-jhipster,yongli82/generator-jhipster,nkolosnjaji/generator-jhipster,eosimosu/generator-jhipster,PierreBesson/generator-jhipster,gzsombor/generator-jhipster,dalbelap/generator-jhipster,liseri/generator-jhipster,JulienMrgrd/generator-jhipster,jhipster/generator-jhipster,rifatdover/generator-jhipster,PierreBesson/generator-jhipster,ruddell/generator-jhipster,sendilkumarn/generator-jhipster,jkutner/generator-jhipster,lrkwz/generator-jhipster,eosimosu/generator-jhipster,sendilkumarn/generator-jhipste
r,cbornet/generator-jhipster,hdurix/generator-jhipster,duderoot/generator-jhipster,nkolosnjaji/generator-jhipster,yongli82/generator-jhipster,robertmilowski/generator-jhipster,nkolosnjaji/generator-jhipster,sohibegit/generator-jhipster,danielpetisme/generator-jhipster,dimeros/generator-jhipster,siliconharborlabs/generator-jhipster,duderoot/generator-jhipster,xetys/generator-jhipster,jkutner/generator-jhipster,deepu105/generator-jhipster,PierreBesson/generator-jhipster,ziogiugno/generator-jhipster,deepu105/generator-jhipster,gzsombor/generator-jhipster,maniacneron/generator-jhipster,erikkemperman/generator-jhipster,lrkwz/generator-jhipster,baskeboler/generator-jhipster,mosoft521/generator-jhipster,xetys/generator-jhipster,eosimosu/generator-jhipster,wmarques/generator-jhipster,yongli82/generator-jhipster,dimeros/generator-jhipster,pascalgrimaud/generator-jhipster,siliconharborlabs/generator-jhipster,robertmilowski/generator-jhipster,xetys/generator-jhipster,stevehouel/generator-jhipster,nkolosnjaji/generator-jhipster,mosoft521/generator-jhipster,gzsombor/generator-jhipster,ctamisier/generator-jhipster,sendilkumarn/generator-jhipster,erikkemperman/generator-jhipster,siliconharborlabs/generator-jhipster,erikkemperman/generator-jhipster,ziogiugno/generator-jhipster,rkohel/generator-jhipster,mraible/generator-jhipster,wmarques/generator-jhipster,dalbelap/generator-jhipster,atomfrede/generator-jhipster,hdurix/generator-jhipster,cbornet/generator-jhipster,deepu105/generator-jhipster,cbornet/generator-jhipster,ramzimaalej/generator-jhipster,stevehouel/generator-jhipster,mosoft521/generator-jhipster,dalbelap/generator-jhipster,pascalgrimaud/generator-jhipster,ruddell/generator-jhipster,jkutner/generator-jhipster,baskeboler/generator-jhipster,lrkwz/generator-jhipster
package <%=packageName%>.config; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.servlet.InstrumentedFilter; import com.codahale.metrics.servlets.MetricsServlet;<% if (clusteredHttpSession == 'hazelcast' || hibernateCache == 'hazelcast') { %> import com.hazelcast.core.HazelcastInstance;<% } %><% if (clusteredHttpSession == 'hazelcast') { %> import com.hazelcast.web.SessionListener; import com.hazelcast.web.spring.SpringAwareWebFilter;<% } %><% if (!skipClient) { %> import <%=packageName%>.web.filter.CachingHttpHeadersFilter;<% } %> import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.embedded.ConfigurableEmbeddedServletContainer; import org.springframework.boot.context.embedded.EmbeddedServletContainerCustomizer; import org.springframework.boot.context.embedded.MimeMappings; import org.springframework.boot.context.embedded.ServletContextInitializer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.env.Environment; import org.springframework.web.cors.CorsConfiguration; import org.springframework.web.cors.UrlBasedCorsConfigurationSource; import org.springframework.web.filter.CorsFilter; <% if (!skipClient) { %> import java.io.File;<% } %> import java.util.*; import javax.inject.Inject; import javax.servlet.*; /** * Configuration of web application with Servlet 3.0 APIs. 
*/ @Configuration public class WebConfigurer implements ServletContextInitializer, EmbeddedServletContainerCustomizer { private final Logger log = LoggerFactory.getLogger(WebConfigurer.class); @Inject private Environment env; @Inject private JHipsterProperties jHipsterProperties; @Autowired(required = false) private MetricRegistry metricRegistry;<% if (hibernateCache == 'hazelcast') { %> // Hazelcast instance is injected to force its initialization before the Servlet filter uses it. @Inject private HazelcastInstance hazelcastInstance;<% } %> @Override public void onStartup(ServletContext servletContext) throws ServletException { log.info("Web application configuration, using profiles: {}", Arrays.toString(env.getActiveProfiles())); EnumSet<DispatcherType> disps = EnumSet.of(DispatcherType.REQUEST, DispatcherType.FORWARD, DispatcherType.ASYNC);<% if (clusteredHttpSession == 'hazelcast') { %> initClusteredHttpSessionFilter(servletContext, disps);<% } %> initMetrics(servletContext, disps);<% if (!skipClient) { %> if (env.acceptsProfiles(Constants.SPRING_PROFILE_PRODUCTION)) { initCachingHttpHeadersFilter(servletContext, disps); }<% } %><% if (devDatabaseType == 'h2Disk' || devDatabaseType == 'h2Memory') { %> if (env.acceptsProfiles(Constants.SPRING_PROFILE_DEVELOPMENT)) { initH2Console(servletContext); }<% } %> log.info("Web application fully configured"); }<% if (clusteredHttpSession == 'hazelcast') { %> /** * Initializes the Clustered Http Session filter */ private void initClusteredHttpSessionFilter(ServletContext servletContext, EnumSet<DispatcherType> disps) { log.debug("Registering Clustered Http Session Filter"); servletContext.addListener(new SessionListener()); FilterRegistration.Dynamic hazelcastWebFilter = servletContext.addFilter("hazelcastWebFilter", new SpringAwareWebFilter()); Map<String, String> parameters = new HashMap<>(); parameters.put("instance-name", "<%=baseName%>"); // Name of the distributed map storing your web session objects 
parameters.put("map-name", "clustered-http-sessions"); // How is your load-balancer configured? // Setting "sticky-session" to "true" means all requests of a session // are routed to the node where the session is first created. // This is excellent for performance. // If "sticky-session" is set to "false", then when a session is updated // on a node, entries for this session on all other nodes are invalidated. // You have to know how your load-balancer is configured before // setting this parameter. Default is true. parameters.put("sticky-session", "true"); // Name of session id cookie parameters.put("cookie-name", "hazelcast.sessionId"); // Are you debugging? Default is false. if (env.acceptsProfiles(Constants.SPRING_PROFILE_PRODUCTION)) { parameters.put("debug", "false"); } else { parameters.put("debug", "true"); } // Do you want to shutdown HazelcastInstance during // web application undeploy process? // Default is true. parameters.put("shutdown-on-destroy", "true"); hazelcastWebFilter.setInitParameters(parameters); hazelcastWebFilter.addMappingForUrlPatterns(disps, true, "/*"); hazelcastWebFilter.setAsyncSupported(true); }<% } %> /** * Set up Mime types. 
*/ @Override public void customize(ConfigurableEmbeddedServletContainer container) { MimeMappings mappings = new MimeMappings(MimeMappings.DEFAULT); // IE issue, see https://github.com/jhipster/generator-jhipster/pull/711 mappings.add("html", "text/html;charset=utf-8"); // CloudFoundry issue, see https://github.com/cloudfoundry/gorouter/issues/64 mappings.add("json", "text/html;charset=utf-8"); container.setMimeMappings(mappings);<% if (!skipClient) { %> // Set document root if we're not running from a jar/war if (getClass().getProtectionDomain().getCodeSource().getLocation().getProtocol().equals("file")) { if (env.acceptsProfiles(Constants.SPRING_PROFILE_PRODUCTION)) { container.setDocumentRoot(new File("<%= CLIENT_DIST_DIR %>")); } else if (env.acceptsProfiles(Constants.SPRING_PROFILE_DEVELOPMENT)) { container.setDocumentRoot(new File("<%= CLIENT_MAIN_SRC_DIR %>")); } }<% } %> }<% if (!skipClient) { %> /** * Initializes the caching HTTP Headers Filter. */ private void initCachingHttpHeadersFilter(ServletContext servletContext, EnumSet<DispatcherType> disps) { log.debug("Registering Caching HTTP Headers Filter"); FilterRegistration.Dynamic cachingHttpHeadersFilter = servletContext.addFilter("cachingHttpHeadersFilter", new CachingHttpHeadersFilter(jHipsterProperties)); cachingHttpHeadersFilter.addMappingForUrlPatterns(disps, true, "/content/*"); cachingHttpHeadersFilter.addMappingForUrlPatterns(disps, true, "/app/*"); cachingHttpHeadersFilter.setAsyncSupported(true); }<% } %> /** * Initializes Metrics. 
*/ private void initMetrics(ServletContext servletContext, EnumSet<DispatcherType> disps) { log.debug("Initializing Metrics registries"); servletContext.setAttribute(InstrumentedFilter.REGISTRY_ATTRIBUTE, metricRegistry); servletContext.setAttribute(MetricsServlet.METRICS_REGISTRY, metricRegistry); log.debug("Registering Metrics Filter"); FilterRegistration.Dynamic metricsFilter = servletContext.addFilter("webappMetricsFilter", new InstrumentedFilter()); metricsFilter.addMappingForUrlPatterns(disps, true, "/*"); metricsFilter.setAsyncSupported(true); log.debug("Registering Metrics Servlet"); ServletRegistration.Dynamic metricsAdminServlet = servletContext.addServlet("metricsServlet", new MetricsServlet()); metricsAdminServlet.addMapping("/metrics/metrics/*"); metricsAdminServlet.setAsyncSupported(true); metricsAdminServlet.setLoadOnStartup(2); } @Bean public CorsFilter corsFilter() { UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource(); CorsConfiguration config = jHipsterProperties.getCors(); if (config.getAllowedOrigins() != null && !config.getAllowedOrigins().isEmpty()) { source.registerCorsConfiguration("/api/**", config); source.registerCorsConfiguration("/v2/api-docs", config); source.registerCorsConfiguration("/oauth/**", config); } return new CorsFilter(source); }<% if (devDatabaseType == 'h2Disk' || devDatabaseType == 'h2Memory') { %> /** * Initializes H2 console */ private void initH2Console(ServletContext servletContext) { log.debug("Initialize H2 console"); ServletRegistration.Dynamic h2ConsoleServlet = servletContext.addServlet("H2Console", new org.h2.server.web.WebServlet()); h2ConsoleServlet.addMapping("/h2-console/*"); h2ConsoleServlet.setInitParameter("-properties", "<%= SERVER_MAIN_RES_DIR %>"); h2ConsoleServlet.setLoadOnStartup(1); }<% } %> }
generators/server/templates/src/main/java/package/config/_WebConfigurer.java
package <%=packageName%>.config; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.servlet.InstrumentedFilter; import com.codahale.metrics.servlets.MetricsServlet;<% if (clusteredHttpSession == 'hazelcast' || hibernateCache == 'hazelcast') { %> import com.hazelcast.core.HazelcastInstance;<% } %><% if (clusteredHttpSession == 'hazelcast') { %> import com.hazelcast.web.SessionListener; import com.hazelcast.web.spring.SpringAwareWebFilter;<% } %><% if (!skipClient) { %> import <%=packageName%>.web.filter.CachingHttpHeadersFilter;<% } %> import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.embedded.ConfigurableEmbeddedServletContainer; import org.springframework.boot.context.embedded.EmbeddedServletContainerCustomizer; import org.springframework.boot.context.embedded.MimeMappings; import org.springframework.boot.context.embedded.ServletContextInitializer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.env.Environment; import org.springframework.web.cors.CorsConfiguration; import org.springframework.web.cors.UrlBasedCorsConfigurationSource; import org.springframework.web.filter.CorsFilter; <% if (!skipClient) { %> import java.io.File;<% } %> import java.util.*; import javax.inject.Inject; import javax.servlet.*; /** * Configuration of web application with Servlet 3.0 APIs. 
*/ @Configuration public class WebConfigurer implements ServletContextInitializer, EmbeddedServletContainerCustomizer { private final Logger log = LoggerFactory.getLogger(WebConfigurer.class); @Inject private Environment env; @Inject private JHipsterProperties jHipsterProperties; @Autowired(required = false) private MetricRegistry metricRegistry;<% if (hibernateCache == 'hazelcast') { %> // Hazelcast instance is injected to force its initialization before the Servlet filter uses it. @Inject private HazelcastInstance hazelcastInstance;<% } %> @Override public void onStartup(ServletContext servletContext) throws ServletException { log.info("Web application configuration, using profiles: {}", Arrays.toString(env.getActiveProfiles())); EnumSet<DispatcherType> disps = EnumSet.of(DispatcherType.REQUEST, DispatcherType.FORWARD, DispatcherType.ASYNC);<% if (clusteredHttpSession == 'hazelcast') { %> initClusteredHttpSessionFilter(servletContext, disps);<% } %> initMetrics(servletContext, disps);<% if (!skipClient) { %> if (env.acceptsProfiles(Constants.SPRING_PROFILE_PRODUCTION)) { initCachingHttpHeadersFilter(servletContext, disps); }<% } %><% if (devDatabaseType == 'h2Disk' || devDatabaseType == 'h2Memory') { %> if (env.acceptsProfiles(Constants.SPRING_PROFILE_DEVELOPMENT)) { initH2Console(servletContext); }<% } %> log.info("Web application fully configured"); }<% if (clusteredHttpSession == 'hazelcast') { %> /** * Initializes the Clustered Http Session filter */ private void initClusteredHttpSessionFilter(ServletContext servletContext, EnumSet<DispatcherType> disps) { log.debug("Registering Clustered Http Session Filter"); servletContext.addListener(new SessionListener()); FilterRegistration.Dynamic hazelcastWebFilter = servletContext.addFilter("hazelcastWebFilter", new SpringAwareWebFilter()); Map<String, String> parameters = new HashMap<>(); parameters.put("instance-name", "<%=baseName%>"); // Name of the distributed map storing your web session objects 
parameters.put("map-name", "clustered-http-sessions"); // How is your load-balancer configured? // Setting "sticky-session" to "true" means all requests of a session // are routed to the node where the session is first created. // This is excellent for performance. // If "sticky-session" is set to "false", then when a session is updated // on a node, entries for this session on all other nodes are invalidated. // You have to know how your load-balancer is configured before // setting this parameter. Default is true. parameters.put("sticky-session", "true"); // Name of session id cookie parameters.put("cookie-name", "hazelcast.sessionId"); // Are you debugging? Default is false. if (env.acceptsProfiles(Constants.SPRING_PROFILE_PRODUCTION)) { parameters.put("debug", "false"); } else { parameters.put("debug", "true"); } // Do you want to shutdown HazelcastInstance during // web application undeploy process? // Default is true. parameters.put("shutdown-on-destroy", "true"); hazelcastWebFilter.setInitParameters(parameters); hazelcastWebFilter.addMappingForUrlPatterns(disps, true, "/*"); hazelcastWebFilter.setAsyncSupported(true); }<% } %> /** * Set up Mime types. 
*/ @Override public void customize(ConfigurableEmbeddedServletContainer container) { MimeMappings mappings = new MimeMappings(MimeMappings.DEFAULT); // IE issue, see https://github.com/jhipster/generator-jhipster/pull/711 mappings.add("html", "text/html;charset=utf-8"); // CloudFoundry issue, see https://github.com/cloudfoundry/gorouter/issues/64 mappings.add("json", "text/html;charset=utf-8"); container.setMimeMappings(mappings);<% if (!skipClient) { %> // Set document root if (env.acceptsProfiles(Constants.SPRING_PROFILE_PRODUCTION)) { container.setDocumentRoot(new File("<%= CLIENT_DIST_DIR %>")); } else if (env.acceptsProfiles(Constants.SPRING_PROFILE_DEVELOPMENT)) { container.setDocumentRoot(new File("<%= CLIENT_MAIN_SRC_DIR %>")); }<% } %> }<% if (!skipClient) { %> /** * Initializes the caching HTTP Headers Filter. */ private void initCachingHttpHeadersFilter(ServletContext servletContext, EnumSet<DispatcherType> disps) { log.debug("Registering Caching HTTP Headers Filter"); FilterRegistration.Dynamic cachingHttpHeadersFilter = servletContext.addFilter("cachingHttpHeadersFilter", new CachingHttpHeadersFilter(jHipsterProperties)); cachingHttpHeadersFilter.addMappingForUrlPatterns(disps, true, "/content/*"); cachingHttpHeadersFilter.addMappingForUrlPatterns(disps, true, "/app/*"); cachingHttpHeadersFilter.setAsyncSupported(true); }<% } %> /** * Initializes Metrics. 
*/ private void initMetrics(ServletContext servletContext, EnumSet<DispatcherType> disps) { log.debug("Initializing Metrics registries"); servletContext.setAttribute(InstrumentedFilter.REGISTRY_ATTRIBUTE, metricRegistry); servletContext.setAttribute(MetricsServlet.METRICS_REGISTRY, metricRegistry); log.debug("Registering Metrics Filter"); FilterRegistration.Dynamic metricsFilter = servletContext.addFilter("webappMetricsFilter", new InstrumentedFilter()); metricsFilter.addMappingForUrlPatterns(disps, true, "/*"); metricsFilter.setAsyncSupported(true); log.debug("Registering Metrics Servlet"); ServletRegistration.Dynamic metricsAdminServlet = servletContext.addServlet("metricsServlet", new MetricsServlet()); metricsAdminServlet.addMapping("/metrics/metrics/*"); metricsAdminServlet.setAsyncSupported(true); metricsAdminServlet.setLoadOnStartup(2); } @Bean public CorsFilter corsFilter() { UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource(); CorsConfiguration config = jHipsterProperties.getCors(); if (config.getAllowedOrigins() != null && !config.getAllowedOrigins().isEmpty()) { source.registerCorsConfiguration("/api/**", config); source.registerCorsConfiguration("/v2/api-docs", config); source.registerCorsConfiguration("/oauth/**", config); } return new CorsFilter(source); }<% if (devDatabaseType == 'h2Disk' || devDatabaseType == 'h2Memory') { %> /** * Initializes H2 console */ private void initH2Console(ServletContext servletContext) { log.debug("Initialize H2 console"); ServletRegistration.Dynamic h2ConsoleServlet = servletContext.addServlet("H2Console", new org.h2.server.web.WebServlet()); h2ConsoleServlet.addMapping("/h2-console/*"); h2ConsoleServlet.setInitParameter("-properties", "<%= SERVER_MAIN_RES_DIR %>"); h2ConsoleServlet.setLoadOnStartup(1); }<% } %> }
Only do setDocumentRoot if not running from jar/war
generators/server/templates/src/main/java/package/config/_WebConfigurer.java
Only do setDocumentRoot if not running from jar/war
<ide><path>enerators/server/templates/src/main/java/package/config/_WebConfigurer.java <ide> // CloudFoundry issue, see https://github.com/cloudfoundry/gorouter/issues/64 <ide> mappings.add("json", "text/html;charset=utf-8"); <ide> container.setMimeMappings(mappings);<% if (!skipClient) { %> <del> // Set document root <del> if (env.acceptsProfiles(Constants.SPRING_PROFILE_PRODUCTION)) { <del> container.setDocumentRoot(new File("<%= CLIENT_DIST_DIR %>")); <del> } else if (env.acceptsProfiles(Constants.SPRING_PROFILE_DEVELOPMENT)) { <del> container.setDocumentRoot(new File("<%= CLIENT_MAIN_SRC_DIR %>")); <add> <add> // Set document root if we're not running from a jar/war <add> if (getClass().getProtectionDomain().getCodeSource().getLocation().getProtocol().equals("file")) { <add> if (env.acceptsProfiles(Constants.SPRING_PROFILE_PRODUCTION)) { <add> container.setDocumentRoot(new File("<%= CLIENT_DIST_DIR %>")); <add> } else if (env.acceptsProfiles(Constants.SPRING_PROFILE_DEVELOPMENT)) { <add> container.setDocumentRoot(new File("<%= CLIENT_MAIN_SRC_DIR %>")); <add> } <ide> }<% } %> <ide> }<% if (!skipClient) { %> <ide>
Java
bsd-2-clause
8c19f745045ab702acf90d6d5df845827f461103
0
clementval/claw-compiler,clementval/claw-compiler
/* * This file is released under terms of BSD license * See LICENSE file for more information */ package cx2x.translator.transformation.loop; // Cx2x import import cx2x.translator.common.Constant; import cx2x.translator.language.ClawLanguage; import cx2x.translator.language.ClawMapping; import cx2x.translator.language.ClawMappingVar; import cx2x.xcodeml.helper.*; import cx2x.xcodeml.xelement.*; import cx2x.xcodeml.transformation.*; import cx2x.xcodeml.exception.*; // OMNI import import xcodeml.util.XmOption; // Java import import java.util.*; /** * A LoopExtraction transformation is an independent transformation. The * transformation consists of locating a loop in a function call and extract it. * This loop is then wrapped around the function call and the parameters are * demoted accordingly to the mapping options. * * @author clementval */ public class LoopExtraction extends Transformation<LoopExtraction> { private Map<String, ClawMapping> _fctMappingMap = null; private Map<String, ClawMapping> _argMappingMap = null; private XfunctionCall _fctCall = null; private XfunctionDefinition _fctDef = null; // Fct holding the fct call private XfunctionDefinition _fctDefToExtract = null; private XdoStatement _extractedLoop = null; /** * Constructs a new LoopExtraction triggered from a specific pragma. * @param directive The directive that triggered the loop extraction * transformation. * @throws IllegalDirectiveException if something is wrong in the directive's * options */ public LoopExtraction(ClawLanguage directive) throws IllegalDirectiveException { super(directive); _argMappingMap = new Hashtable<>(); _fctMappingMap = new Hashtable<>(); try { extractMappingInformation(); } catch (IllegalDirectiveException ide){ ide.setDirectiveLine(directive.getPragma().getLineNo()); throw ide; } } /** * Extract all mapping information from the pragma data. Each * map(<mapped>:<mapping>) produces a ClawMapping object. 
*/ private void extractMappingInformation() throws IllegalDirectiveException { for(ClawMapping m : _directive.getMappings()){ for(ClawMappingVar mappedVar : m.getMappedVariables()){ if(_argMappingMap.containsKey(mappedVar.getArgMapping())){ throw new IllegalDirectiveException(_directive.getPragma().getValue(), mappedVar + " appears more than once in the mapping"); } else { _argMappingMap.put(mappedVar.getArgMapping(), m); } if(_fctMappingMap.containsKey(mappedVar.getFctMapping())){ throw new IllegalDirectiveException(_directive.getPragma().getValue(), mappedVar + " appears more than once in the mapping"); } else { _fctMappingMap.put(mappedVar.getFctMapping(), m); } } } } /** * Check whether the provided mapping information are correct or not. A * mapped variable should only appear once. Mapped variable must be parameters * in the function definition. * Mapping using the same mapping variables are merged together. * @return True if all the conditions are respected. False otherwise. */ private boolean checkMappingInformation(XcodeProgram xcodeml){ for(Map.Entry<String, ClawMapping> map : _argMappingMap.entrySet()){ if(_fctCall.getArgumentsTable().findArgument(map.getKey()) == null){ xcodeml.addError("Mapped variable " + map.getKey() + " not found in function call arguments", _directive.getPragma().getLineNo()); return false; } } return true; } /** * * @param xcodeml The XcodeML on which the transformations are applied. * @param transformer The transformer used to applied the transformations. * @return True if the transformation analysis succeeded. False otherwise. 
*/ public boolean analyze(XcodeProgram xcodeml, Transformer transformer){ XexprStatement _exprStmt = XelementHelper.findNextExprStatement(_directive.getPragma()); if(_exprStmt == null){ xcodeml.addError("No function call detected after loop-extract", _directive.getPragma().getLineNo()); return false; } // Find function CALL _fctCall = XelementHelper.findFctCall(_exprStmt); if(_fctCall == null){ xcodeml.addError("No function call detected after loop-extract", _directive.getPragma().getLineNo()); return false; } _fctDef = XelementHelper.findParentFctDef(_fctCall); if(_fctDef == null){ xcodeml.addError("No function around the fct call", _directive.getPragma().getLineNo()); return false; } // Find function declaration _fctDefToExtract = XelementHelper.findFunctionDefinition(xcodeml, _fctCall); if(_fctDefToExtract == null){ xcodeml.addError("Could not locate the function definition for: " + _fctCall.getName().getValue(), _directive.getPragma().getLineNo()); return false; } // Find the loop to be extracted try { _extractedLoop = locateDoStatement(_fctDefToExtract); } catch (IllegalTransformationException itex){ xcodeml.addError(itex.getMessage(), _directive.getPragma().getLineNo()); return false; } return checkMappingInformation(xcodeml); } /** * Apply the transformation. A loop extraction is applied in the following * steps: * 1) Duplicate the function targeted by the transformation * 2) Extract the loop body in the duplicated function and remove the loop. * 3) Adapt function call and demote array references in the duplicated * function body. * 4) Optional: Add a LoopFusion transformation to the transformaions' queue. * * @param xcodeml The XcodeML on which the transformations are applied. * @param transformer The transformer used to applied the transformations. * @param other Only for dependent transformation. The other * transformation part of the transformation. * @throws IllegalTransformationException if the transformation cannot be * applied. 
*/ public void transform(XcodeProgram xcodeml, Transformer transformer, LoopExtraction other) throws Exception { /* * DUPLICATE THE FUNCTION */ // Duplicate function definition XfunctionDefinition clonedFctDef = _fctDefToExtract.cloneObject(); String newFctTypeHash = xcodeml.getTypeTable().generateFctTypeHash(); String newFctName = clonedFctDef.getName().getValue() + Constant.EXTRACTION_SUFFIX + transformer.getNextTransformationCounter(); clonedFctDef.getName().setValue(newFctName); clonedFctDef.getName().setType(newFctTypeHash); // Update the symbol table in the fct definition Xid fctId = clonedFctDef.getSymbolTable() .get(_fctDefToExtract.getName().getValue()); fctId.setType(newFctTypeHash); fctId.setName(newFctName); // Get the fctType in typeTable XfunctionType fctType = (XfunctionType)xcodeml .getTypeTable().get(_fctDefToExtract.getName().getType()); XfunctionType newFctType = fctType.cloneObject(); newFctType.setType(newFctTypeHash); xcodeml.getTypeTable().add(newFctType); // Get the id from the global symbols table Xid globalFctId = xcodeml.getGlobalSymbolsTable() .get(_fctDefToExtract.getName().getValue()); // If the fct is define in the global symbol table, duplicate it if(globalFctId != null){ Xid newFctId = globalFctId.cloneObject(); newFctId.setType(newFctTypeHash); newFctId.setName(newFctName); xcodeml.getGlobalSymbolsTable().add(newFctId); } // Insert the duplicated function declaration XelementHelper.insertAfter(_fctDefToExtract, clonedFctDef); // Find the loop that will be extracted XdoStatement loopInClonedFct = locateDoStatement(clonedFctDef); if(XmOption.isDebugOutput()){ System.out.println("loop-extract transformation: " + _directive.getPragma().getValue()); System.out.println(" created subroutine: " + clonedFctDef.getName().getValue()); } /* * REMOVE BODY FROM THE LOOP AND DELETE THE LOOP */ // 1. append body into fct body after loop XelementHelper.extractBody(loopInClonedFct); // 2. 
delete loop loopInClonedFct.delete(); /* * ADAPT FUNCTION CALL AND DEMOTE ARRAY REFERENCES IN THE BODY * OF THE FUNCTION */ // Wrap function call with loop XdoStatement extractedLoop = wrapCallWithLoop(xcodeml, _extractedLoop.getIterationRange()); if(XmOption.isDebugOutput()){ System.out.println(" call wrapped with loop: " + _fctCall.getName().getValue() + " --> " + clonedFctDef.getName().getValue()); } // Change called fct name _fctCall.getName().setValue(newFctName); _fctCall.getName().setType(newFctTypeHash); // Adapt function call parameters and function declaration XargumentsTable args = _fctCall.getArgumentsTable(); XdeclTable fctDeclarations = clonedFctDef.getDeclarationTable(); XsymbolTable fctSymbols = clonedFctDef.getSymbolTable(); if(XmOption.isDebugOutput()){ System.out.println(" Start to apply mapping: " + _directive.getMappings().size()); } for(ClawMapping mapping : _directive.getMappings()){ System.out.println("Apply mapping (" + mapping.getMappedDimensions() + ") "); for(ClawMappingVar var : mapping.getMappedVariables()){ System.out.println(" Var: " + var); XexprModel argument = args.findArgument(var.getArgMapping()); if(argument == null) { continue; } /* Case 1: Var --> ArrayRef * Var --> ArrayRef transformation * 1. Check that the variable used as array index exists in the * current scope (XdeclTable). If so, get its type value. Create a * Var element for the arrayIndex. Create the arrayIndex element * with Var as child. * * 2. Get the reference type of the base variable. * 2.1 Create the varRef element with the type of base variable * 2.2 insert clone of base variable in varRef * 3. 
Create arrayRef element with varRef + arrayIndex */ if(argument.isVar()){ Xvar varArg = argument.getVar(); System.out.println(" arg found: " + varArg.getType()); XbasicType type = (XbasicType)xcodeml.getTypeTable().get(varArg.getType()); System.out.println(" ref: " + type.getRef()); System.out.println(" dimensions: " + type.getDimensions()); // Demotion cannot be applied as type dimension is smaller if(type.getDimensions() < mapping.getMappedDimensions()){ throw new IllegalTransformationException( "mapping dimensions too big. Mapping " + mapping.toString() + " is wrong ...", _directive.getPragma().getLineNo()); } XarrayRef newArg = XelementHelper.createEmpty(XarrayRef.class, xcodeml); newArg.setType(type.getRef()); XvarRef varRef = XelementHelper.createEmpty(XvarRef.class, xcodeml); varRef.setType(varArg.getType()); varRef.append(varArg, true); newArg.append(varRef); // create arrayIndex for(ClawMappingVar mappingVar : mapping.getMappingVariables()){ XarrayIndex arrayIndex = XelementHelper. 
createEmpty(XarrayIndex.class, xcodeml); // Find the mapping var in the local table (fct scope) XvarDecl mappingVarDecl = _fctDef.getDeclarationTable().get(mappingVar.getArgMapping()); // Add to arrayIndex Xvar newMappingVar = XelementHelper.createEmpty(Xvar.class, xcodeml); newMappingVar.setScope(Xscope.LOCAL); newMappingVar.setType(mappingVarDecl.getName().getType()); newMappingVar.setValue(mappingVarDecl.getName().getValue()); arrayIndex.append(newMappingVar); newArg.append(arrayIndex); } args.replace(varArg, newArg); } // Case 2: ArrayRef (n arrayIndex) --> ArrayRef (n+m arrayIndex) else if (argument.isArrayRef()){ XarrayRef arraRef = argument.getArrayRef(); // TODO } // Change variable declaration in extracted fct XvarDecl varDecl = fctDeclarations.get(var.getFctMapping()); Xid id = fctSymbols.get(var.getFctMapping()); XbasicType varDeclType = (XbasicType)xcodeml.getTypeTable().get(varDecl.getName().getType()); // Case 1: variable is demoted to scalar then take the ref type if(varDeclType.getDimensions() == mapping.getMappedDimensions()){ Xname tempName = XelementHelper.createEmpty(Xname.class, xcodeml); tempName.setValue(var.getFctMapping()); tempName.setType(varDeclType.getRef()); XvarDecl newVarDecl = XelementHelper.createEmpty(XvarDecl.class, xcodeml); newVarDecl.append(tempName); fctDeclarations.replace(newVarDecl); id.setType(varDeclType.getRef()); } else { // Case 2: variable is not totally demoted then create new type // TODO } } // Loop mapped variables } // Loop over mapping clauses // Adapt array reference in function body List<XarrayRef> arrayReferences = XelementHelper.getAllArrayReferences(clonedFctDef.getBody()); for(XarrayRef ref : arrayReferences){ if(!ref.getVarRef().isVar()){ continue; } String mappedVar = ref.getVarRef().getVar().getValue(); if(_fctMappingMap.containsKey(mappedVar)){ ClawMapping mapping = _fctMappingMap.get(mappedVar); boolean changeRef = true; int mappingIndex = 0; for(XbaseElement e : ref.getInnerElements()){ if(e 
instanceof XarrayIndex){ XarrayIndex arrayIndex = (XarrayIndex)e; if(arrayIndex.getExprModel() != null && arrayIndex.getExprModel().isVar()){ String varName = arrayIndex.getExprModel().getVar().getValue(); if(varName.equals(mapping.getMappingVariables().get(mappingIndex).getFctMapping())){ ++mappingIndex; } else { changeRef = false; } } } } if(changeRef){ // TODO Var ref should be extracted only if the reference can be // totally demoted XelementHelper.insertBefore(ref, ref.getVarRef().getVar().cloneObject()); ref.delete(); } } } // Wrap with parallel section if option is set if(_directive.hasParallelOption()){ Xpragma parallelStart = XelementHelper.createEmpty(Xpragma.class, xcodeml); parallelStart.setData("acc parallel"); Xpragma parallelEnd = XelementHelper.createEmpty(Xpragma.class, xcodeml); parallelEnd.setData("acc end parallel"); XelementHelper.insertAfter(_directive.getPragma(), parallelStart); XelementHelper.insertAfter(extractedLoop, parallelEnd); if(_directive.hasAccOption()){ insertAccOption(parallelStart, xcodeml); } } else if (_directive.hasAccOption()){ insertAccOption(_directive.getPragma(), xcodeml); } // Transformation is done. Add additional transfomation here if(_directive.hasFusionOption()){ LoopFusion fusion = new LoopFusion(extractedLoop, _directive.getGroupName(), _directive.getPragma().getLineNo()); transformer.addTransformation(fusion); if(XmOption.isDebugOutput()){ System.out.println("Loop fusion added: " + _directive.getGroupName()); } } this.transformed(); } /** * Try to find a do statement matching the range of loop-extract. * @param from XbaseElement to search from. Search is performed in its * children. * @return A XdoStatement object that match the range of loop-extract. 
* @throws IllegalTransformationException */ private XdoStatement locateDoStatement(XbaseElement from) throws IllegalTransformationException { XdoStatement foundStatement = XelementHelper.findDoStatement(from, true); if(foundStatement == null){ throw new IllegalTransformationException("No loop found in function", _directive.getPragma().getLineNo()); } else { if(!_directive.getRange().equals(foundStatement.getIterationRange())) { // Try to find another loops that meet the criteria do { foundStatement = XelementHelper.findNextDoStatement(foundStatement); } while (foundStatement != null && !_directive.getRange().equals(foundStatement.getIterationRange())); } } if(foundStatement == null){ throw new IllegalTransformationException("No loop found in function", _directive.getPragma().getLineNo()); } if(!_directive.getRange().equals(foundStatement.getIterationRange())) { throw new IllegalTransformationException( "Iteration range is different than the loop to be extracted", _directive.getPragma().getLineNo() ); } return foundStatement; } /** * Create a new pragma statement and insert it after the insert point * @param insertPoint Statement just before the insertion * @param xcodeml The XcodeML representation. */ private void insertAccOption(Xpragma insertPoint, XcodeProgram xcodeml) throws IllegalTransformationException { Xpragma accAdditionalOption = XelementHelper. createEmpty(Xpragma.class, xcodeml); accAdditionalOption.setData(Constant.OPENACC_PREFIX + " " + _directive.getAccClauses()); XelementHelper.insertAfter(insertPoint, accAdditionalOption); } /** * Wrap a function call with a do statement. * @param xcodeml The XcodeML representation. * @param iterationRange Iteration range to be applied to the do statement. * @return The created do statement. 
*/ private XdoStatement wrapCallWithLoop(XcodeProgram xcodeml, XloopIterationRange iterationRange) throws Exception { // Create a new empty loop XdoStatement loop = XelementHelper.createWithEmptyBody(xcodeml, iterationRange); // Insert the new empty loop just after the pragma XelementHelper.insertAfter(_directive.getPragma(), loop); // Move the call into the loop body XelementHelper.insertFctCallIntoLoop(loop, _fctCall); insertDeclaration(iterationRange.getInductionVar().getValue()); if(iterationRange.getIndexRange().getLowerBound().getExprModel().isVar()){ insertDeclaration(iterationRange.getIndexRange().getLowerBound().getValue()); } if(iterationRange.getIndexRange().getUpperBound().getExprModel().isVar()){ insertDeclaration(iterationRange.getIndexRange().getUpperBound().getValue()); } if(iterationRange.getIndexRange().getStep().getExprModel().isVar()){ insertDeclaration(iterationRange.getIndexRange().getStep().getValue()); } return loop; } /** * Insert new declaration in the function definition. * @param id The id used for insertion. */ private void insertDeclaration(String id){ Xid inductionVarId = _fctDef.getSymbolTable().get(id); if(inductionVarId == null){ Xid copyId = _fctDefToExtract.getSymbolTable().get(id); _fctDef.getSymbolTable().add(copyId); } XvarDecl inductionVarDecl = _fctDef.getDeclarationTable().get(id); if(inductionVarDecl == null){ XvarDecl copyDecl = _fctDefToExtract.getDeclarationTable().get(id); _fctDef.getDeclarationTable().add(copyDecl); } } /** * @see Transformation#canBeTransformedWith(Object) * @return Always false as independent transformation are applied one by one. */ public boolean canBeTransformedWith(LoopExtraction other) { return false; // Always false as independent transformation } }
omni-cx2x/src/cx2x/translator/transformation/loop/LoopExtraction.java
/* * This file is released under terms of BSD license * See LICENSE file for more information */ package cx2x.translator.transformation.loop; // Cx2x import import cx2x.translator.common.Constant; import cx2x.translator.language.ClawLanguage; import cx2x.translator.language.ClawMapping; import cx2x.translator.language.ClawMappingVar; import cx2x.translator.language.ClawRange; import cx2x.xcodeml.helper.*; import cx2x.xcodeml.xelement.*; import cx2x.xcodeml.transformation.*; import cx2x.xcodeml.exception.*; // OMNI import import xcodeml.util.XmOption; // Java import import java.util.*; /** * A LoopExtraction transformation is an independent transformation. The * transformation consists of locating a loop in a function call and extract it. * This loop is then wrapped around the function call and the parameters are * demoted accordingly to the mapping options. * * @author clementval */ public class LoopExtraction extends Transformation<LoopExtraction> { private Map<String, ClawMapping> _fctMappingMap = null; private Map<String, ClawMapping> _argMappingMap = null; private XfunctionCall _fctCall = null; private XfunctionDefinition _fctDef = null; // Fct holding the fct call private XfunctionDefinition _fctDefToExtract = null; private XdoStatement _extractedLoop = null; /** * Constructs a new LoopExtraction triggered from a specific pragma. * @param directive The directive that triggered the loop extraction * transformation. * @throws IllegalDirectiveException if something is wrong in the directive's * options */ public LoopExtraction(ClawLanguage directive) throws IllegalDirectiveException { super(directive); _argMappingMap = new Hashtable<>(); _fctMappingMap = new Hashtable<>(); try { extractMappingInformation(); } catch (IllegalDirectiveException ide){ ide.setDirectiveLine(directive.getPragma().getLineNo()); throw ide; } } /** * Extract all mapping information from the pragma data. Each * map(<mapped>:<mapping>) produces a ClawMapping object. 
*/ private void extractMappingInformation() throws IllegalDirectiveException { for(ClawMapping m : _directive.getMappings()){ for(ClawMappingVar mappedVar : m.getMappedVariables()){ if(_argMappingMap.containsKey(mappedVar.getArgMapping())){ throw new IllegalDirectiveException(_directive.getPragma().getValue(), mappedVar + " appears more than once in the mapping"); } else { _argMappingMap.put(mappedVar.getArgMapping(), m); } if(_fctMappingMap.containsKey(mappedVar.getFctMapping())){ throw new IllegalDirectiveException(_directive.getPragma().getValue(), mappedVar + " appears more than once in the mapping"); } else { _fctMappingMap.put(mappedVar.getFctMapping(), m); } } } } /** * Check whether the provided mapping information are correct or not. A * mapped variable should only appear once. Mapped variable must be parameters * in the function definition. * Mapping using the same mapping variables are merged together. * @return True if all the conditions are respected. False otherwise. */ private boolean checkMappingInformation(XcodeProgram xcodeml){ for(Map.Entry<String, ClawMapping> map : _argMappingMap.entrySet()){ if(_fctCall.getArgumentsTable().findArgument(map.getKey()) == null){ xcodeml.addError("Mapped variable " + map.getKey() + " not found in function call arguments", _directive.getPragma().getLineNo()); return false; } } return true; } /** * * @param xcodeml The XcodeML on which the transformations are applied. * @param transformer The transformer used to applied the transformations. * @return True if the transformation analysis succeeded. False otherwise. 
*/ public boolean analyze(XcodeProgram xcodeml, Transformer transformer){ XexprStatement _exprStmt = XelementHelper.findNextExprStatement(_directive.getPragma()); if(_exprStmt == null){ xcodeml.addError("No function call detected after loop-extract", _directive.getPragma().getLineNo()); return false; } // Find function CALL _fctCall = XelementHelper.findFctCall(_exprStmt); if(_fctCall == null){ xcodeml.addError("No function call detected after loop-extract", _directive.getPragma().getLineNo()); return false; } _fctDef = XelementHelper.findParentFctDef(_fctCall); if(_fctDef == null){ xcodeml.addError("No function around the fct call", _directive.getPragma().getLineNo()); return false; } // Find function declaration _fctDefToExtract = XelementHelper.findFunctionDefinition(xcodeml, _fctCall); if(_fctDefToExtract == null){ xcodeml.addError("Could not locate the function definition for: " + _fctCall.getName().getValue(), _directive.getPragma().getLineNo()); return false; } // Find the loop to be extracted try { _extractedLoop = locateDoStatement(_fctDefToExtract); } catch (IllegalTransformationException itex){ xcodeml.addError(itex.getMessage(), _directive.getPragma().getLineNo()); return false; } return checkMappingInformation(xcodeml); } /** * Apply the transformation. A loop extraction is applied in the following * steps: * 1) Duplicate the function targeted by the transformation * 2) Extract the loop body in the duplicated function and remove the loop. * 3) Adapt function call and demote array references in the duplicated * function body. * 4) Optional: Add a LoopFusion transformation to the transformaions' queue. * * @param xcodeml The XcodeML on which the transformations are applied. * @param transformer The transformer used to applied the transformations. * @param other Only for dependent transformation. The other * transformation part of the transformation. * @throws IllegalTransformationException if the transformation cannot be * applied. 
*/ public void transform(XcodeProgram xcodeml, Transformer transformer, LoopExtraction other) throws Exception { /* * DUPLICATE THE FUNCTION */ // Duplicate function definition XfunctionDefinition clonedFctDef = _fctDefToExtract.cloneObject(); String newFctTypeHash = xcodeml.getTypeTable().generateFctTypeHash(); String newFctName = clonedFctDef.getName().getValue() + Constant.EXTRACTION_SUFFIX + transformer.getNextTransformationCounter(); clonedFctDef.getName().setValue(newFctName); clonedFctDef.getName().setType(newFctTypeHash); // Update the symbol table in the fct definition Xid fctId = clonedFctDef.getSymbolTable() .get(_fctDefToExtract.getName().getValue()); fctId.setType(newFctTypeHash); fctId.setName(newFctName); // Get the fctType in typeTable XfunctionType fctType = (XfunctionType)xcodeml .getTypeTable().get(_fctDefToExtract.getName().getType()); XfunctionType newFctType = fctType.cloneObject(); newFctType.setType(newFctTypeHash); xcodeml.getTypeTable().add(newFctType); // Get the id from the global symbols table Xid globalFctId = xcodeml.getGlobalSymbolsTable() .get(_fctDefToExtract.getName().getValue()); // If the fct is define in the global symbol table, duplicate it if(globalFctId != null){ Xid newFctId = globalFctId.cloneObject(); newFctId.setType(newFctTypeHash); newFctId.setName(newFctName); xcodeml.getGlobalSymbolsTable().add(newFctId); } // Insert the duplicated function declaration XelementHelper.insertAfter(_fctDefToExtract, clonedFctDef); // Find the loop that will be extracted XdoStatement loopInClonedFct = locateDoStatement(clonedFctDef); if(XmOption.isDebugOutput()){ System.out.println("loop-extract transformation: " + _directive.getPragma().getValue()); System.out.println(" created subroutine: " + clonedFctDef.getName().getValue()); } /* * REMOVE BODY FROM THE LOOP AND DELETE THE LOOP */ // 1. append body into fct body after loop XelementHelper.extractBody(loopInClonedFct); // 2. 
delete loop loopInClonedFct.delete(); /* * ADAPT FUNCTION CALL AND DEMOTE ARRAY REFERENCES IN THE BODY * OF THE FUNCTION */ // Wrap function call with loop XdoStatement extractedLoop = wrapCallWithLoop(xcodeml, _extractedLoop.getIterationRange()); if(XmOption.isDebugOutput()){ System.out.println(" call wrapped with loop: " + _fctCall.getName().getValue() + " --> " + clonedFctDef.getName().getValue()); } // Change called fct name _fctCall.getName().setValue(newFctName); _fctCall.getName().setType(newFctTypeHash); // Adapt function call parameters and function declaration XargumentsTable args = _fctCall.getArgumentsTable(); XdeclTable fctDeclarations = clonedFctDef.getDeclarationTable(); XsymbolTable fctSymbols = clonedFctDef.getSymbolTable(); if(XmOption.isDebugOutput()){ System.out.println(" Start to apply mapping: " + _directive.getMappings().size()); } for(ClawMapping mapping : _directive.getMappings()){ System.out.println("Apply mapping (" + mapping.getMappedDimensions() + ") "); for(ClawMappingVar var : mapping.getMappedVariables()){ System.out.println(" Var: " + var); XexprModel argument = args.findArgument(var.getArgMapping()); if(argument == null) { continue; } /* Case 1: Var --> ArrayRef * Var --> ArrayRef transformation * 1. Check that the variable used as array index exists in the * current scope (XdeclTable). If so, get its type value. Create a * Var element for the arrayIndex. Create the arrayIndex element * with Var as child. * * 2. Get the reference type of the base variable. * 2.1 Create the varRef element with the type of base variable * 2.2 insert clone of base variable in varRef * 3. 
Create arrayRef element with varRef + arrayIndex */ if(argument.isVar()){ Xvar varArg = argument.getVar(); System.out.println(" arg found: " + varArg.getType()); XbasicType type = (XbasicType)xcodeml.getTypeTable().get(varArg.getType()); System.out.println(" ref: " + type.getRef()); System.out.println(" dimensions: " + type.getDimensions()); // Demotion cannot be applied as type dimension is smaller if(type.getDimensions() < mapping.getMappedDimensions()){ throw new IllegalTransformationException( "mapping dimensions too big. Mapping " + mapping.toString() + " is wrong ...", _directive.getPragma().getLineNo()); } XarrayRef newArg = XelementHelper.createEmpty(XarrayRef.class, xcodeml); newArg.setType(type.getRef()); XvarRef varRef = XelementHelper.createEmpty(XvarRef.class, xcodeml); varRef.setType(varArg.getType()); varRef.append(varArg, true); newArg.append(varRef); // create arrayIndex for(ClawMappingVar mappingVar : mapping.getMappingVariables()){ XarrayIndex arrayIndex = XelementHelper. 
createEmpty(XarrayIndex.class, xcodeml); // Find the mapping var in the local table (fct scope) XvarDecl mappingVarDecl = _fctDef.getDeclarationTable().get(mappingVar.getArgMapping()); // Add to arrayIndex Xvar newMappingVar = XelementHelper.createEmpty(Xvar.class, xcodeml); newMappingVar.setScope(Xscope.LOCAL); newMappingVar.setType(mappingVarDecl.getName().getType()); newMappingVar.setValue(mappingVarDecl.getName().getValue()); arrayIndex.append(newMappingVar); newArg.append(arrayIndex); } args.replace(varArg, newArg); } // Case 2: ArrayRef (n arrayIndex) --> ArrayRef (n+m arrayIndex) else if (argument.isArrayRef()){ XarrayRef arraRef = argument.getArrayRef(); // TODO } // Change variable declaration in extracted fct XvarDecl varDecl = fctDeclarations.get(var.getFctMapping()); Xid id = fctSymbols.get(var.getFctMapping()); XbasicType varDeclType = (XbasicType)xcodeml.getTypeTable().get(varDecl.getName().getType()); // Case 1: variable is demoted to scalar then take the ref type if(varDeclType.getDimensions() == mapping.getMappedDimensions()){ Xname tempName = XelementHelper.createEmpty(Xname.class, xcodeml); tempName.setValue(var.getFctMapping()); tempName.setType(varDeclType.getRef()); XvarDecl newVarDecl = XelementHelper.createEmpty(XvarDecl.class, xcodeml); newVarDecl.append(tempName); fctDeclarations.replace(newVarDecl); id.setType(varDeclType.getRef()); } else { // Case 2: variable is not totally demoted then create new type // TODO } } // Loop mapped variables } // Loop over mapping clauses // Adapt array reference in function body List<XarrayRef> arrayReferences = XelementHelper.getAllArrayReferences(clonedFctDef.getBody()); for(XarrayRef ref : arrayReferences){ if(!ref.getVarRef().isVar()){ continue; } String mappedVar = ref.getVarRef().getVar().getValue(); if(_fctMappingMap.containsKey(mappedVar)){ ClawMapping mapping = _fctMappingMap.get(mappedVar); boolean changeRef = true; int mappingIndex = 0; for(XbaseElement e : ref.getInnerElements()){ if(e 
instanceof XarrayIndex){ XarrayIndex arrayIndex = (XarrayIndex)e; if(arrayIndex.getExprModel() != null && arrayIndex.getExprModel().isVar()){ String varName = arrayIndex.getExprModel().getVar().getValue(); if(varName.equals(mapping.getMappingVariables().get(mappingIndex).getFctMapping())){ ++mappingIndex; } else { changeRef = false; } } } } if(changeRef){ // TODO Var ref should be extracted only if the reference can be // totally demoted XelementHelper.insertBefore(ref, ref.getVarRef().getVar().cloneObject()); ref.delete(); } } } // Wrap with parallel section if option is set if(_directive.hasParallelOption()){ Xpragma parallelStart = XelementHelper.createEmpty(Xpragma.class, xcodeml); parallelStart.setData("acc parallel"); Xpragma parallelEnd = XelementHelper.createEmpty(Xpragma.class, xcodeml); parallelEnd.setData("acc end parallel"); XelementHelper.insertAfter(_directive.getPragma(), parallelStart); XelementHelper.insertAfter(extractedLoop, parallelEnd); if(_directive.hasAccOption()){ insertAccOption(parallelStart, xcodeml); } } else if (_directive.hasAccOption()){ insertAccOption(_directive.getPragma(), xcodeml); } // Transformation is done. Add additional transfomation here if(_directive.hasFusionOption()){ LoopFusion fusion = new LoopFusion(extractedLoop, _directive.getGroupName(), _directive.getPragma().getLineNo()); transformer.addTransformation(fusion); if(XmOption.isDebugOutput()){ System.out.println("Loop fusion added: " + _directive.getGroupName()); } } this.transformed(); } /** * Try to find a do statement matching the range of loop-extract. * @param from XbaseElement to search from. Search is performed in its * children. * @return A XdoStatement object that match the range of loop-extract. 
* @throws IllegalTransformationException */ private XdoStatement locateDoStatement(XbaseElement from) throws IllegalTransformationException { XdoStatement foundStatement = XelementHelper.findDoStatement(from, true); if(foundStatement == null){ throw new IllegalTransformationException("No loop found in function", _directive.getPragma().getLineNo()); } else { if(!_directive.getRange().equals(foundStatement.getIterationRange())) { // Try to find another loops that meet the criteria do { foundStatement = XelementHelper.findNextDoStatement(foundStatement); } while (foundStatement != null && !_directive.getRange().equals(foundStatement.getIterationRange())); } } if(foundStatement == null){ throw new IllegalTransformationException("No loop found in function", _directive.getPragma().getLineNo()); } if(!_directive.getRange().equals(foundStatement.getIterationRange())) { throw new IllegalTransformationException( "Iteration range is different than the loop to be extracted", _directive.getPragma().getLineNo() ); } return foundStatement; } /** * Create a new pragma statement and insert it after the insert point * @param insertPoint Statement just before the insertion * @param xcodeml The XcodeML representation. */ private void insertAccOption(Xpragma insertPoint, XcodeProgram xcodeml) throws IllegalTransformationException { Xpragma accAdditionalOption = XelementHelper. createEmpty(Xpragma.class, xcodeml); accAdditionalOption.setData(Constant.OPENACC_PREFIX + " " + _directive.getAccClauses()); XelementHelper.insertAfter(insertPoint, accAdditionalOption); } /** * Wrap a function call with a do statement. * @param xcodeml The XcodeML representation. * @param iterationRange Iteration range to be applied to the do statement. * @return The created do statement. 
*/ private XdoStatement wrapCallWithLoop(XcodeProgram xcodeml, XloopIterationRange iterationRange) throws Exception { // Create a new empty loop XdoStatement loop = XelementHelper.createWithEmptyBody(xcodeml, iterationRange); // Insert the new empty loop just after the pragma XelementHelper.insertAfter(_directive.getPragma(), loop); // Move the call into the loop body XelementHelper.insertFctCallIntoLoop(loop, _fctCall); insertDeclaration(iterationRange.getInductionVar().getValue()); if(iterationRange.getIndexRange().getLowerBound().getExprModel().isVar()){ insertDeclaration(iterationRange.getIndexRange().getLowerBound().getValue()); } if(iterationRange.getIndexRange().getUpperBound().getExprModel().isVar()){ insertDeclaration(iterationRange.getIndexRange().getUpperBound().getValue()); } if(iterationRange.getIndexRange().getStep().getExprModel().isVar()){ insertDeclaration(iterationRange.getIndexRange().getStep().getValue()); } return loop; } /** * Insert new declaration in the function definition. * @param id The id used for insertion. */ private void insertDeclaration(String id){ Xid inductionVarId = _fctDef.getSymbolTable().get(id); if(inductionVarId == null){ Xid copyId = _fctDefToExtract.getSymbolTable().get(id); _fctDef.getSymbolTable().add(copyId); } XvarDecl inductionVarDecl = _fctDef.getDeclarationTable().get(id); if(inductionVarDecl == null){ XvarDecl copyDecl = _fctDefToExtract.getDeclarationTable().get(id); _fctDef.getDeclarationTable().add(copyDecl); } } /** * @see Transformation#canBeTransformedWith(Object) * @return Always false as independent transformation are applied one by one. */ public boolean canBeTransformedWith(LoopExtraction other) { return false; // Always false as independent transformation } }
Clean import
omni-cx2x/src/cx2x/translator/transformation/loop/LoopExtraction.java
Clean import
<ide><path>mni-cx2x/src/cx2x/translator/transformation/loop/LoopExtraction.java <ide> import cx2x.translator.language.ClawLanguage; <ide> import cx2x.translator.language.ClawMapping; <ide> import cx2x.translator.language.ClawMappingVar; <del>import cx2x.translator.language.ClawRange; <ide> import cx2x.xcodeml.helper.*; <ide> import cx2x.xcodeml.xelement.*; <ide> import cx2x.xcodeml.transformation.*;
Java
apache-2.0
e719a71f72885cee75f1b78648cb6606344f9c87
0
viacheslavokolitiy/AndEngine,Munazza/AndEngine,zhidew/AndEngine,zhidew/AndEngine,msdgwzhy6/AndEngine,yaye729125/gles,Munazza/AndEngine,jduberville/AndEngine,godghdai/AndEngine,ericlaro/AndEngineLibrary,parthipanramesh/AndEngine,godghdai/AndEngine,zhidew/AndEngine,shiguang1120/AndEngine,nicolasgramlich/AndEngine,viacheslavokolitiy/AndEngine,yaye729125/gles,zhidew/AndEngine,duchien85/AndEngine,zcwk/AndEngine,ericlaro/AndEngineLibrary,chautn/AndEngine,godghdai/AndEngine,zcwk/AndEngine,jduberville/AndEngine,pongo710/AndEngine,viacheslavokolitiy/AndEngine,yaye729125/gles,Munazza/AndEngine,yaye729125/gles,yudhir/AndEngine,jduberville/AndEngine,parthipanramesh/AndEngine,yudhir/AndEngine,borrom/AndEngine,parthipanramesh/AndEngine,luoxiaoshenghustedu/AndEngine,luoxiaoshenghustedu/AndEngine,nicolasgramlich/AndEngine,ericlaro/AndEngineLibrary,chautn/AndEngine,viacheslavokolitiy/AndEngine,chautn/AndEngine,duchien85/AndEngine,shiguang1120/AndEngine,jduberville/AndEngine,shiguang1120/AndEngine,godghdai/AndEngine,msdgwzhy6/AndEngine,pongo710/AndEngine,nicolasgramlich/AndEngine,Munazza/AndEngine,msdgwzhy6/AndEngine,shiguang1120/AndEngine,msdgwzhy6/AndEngine,zcwk/AndEngine,borrom/AndEngine,parthipanramesh/AndEngine,luoxiaoshenghustedu/AndEngine,borrom/AndEngine,yudhir/AndEngine,zcwk/AndEngine,borrom/AndEngine,luoxiaoshenghustedu/AndEngine,nicolasgramlich/AndEngine,yudhir/AndEngine,pongo710/AndEngine,pongo710/AndEngine,ericlaro/AndEngineLibrary,chautn/AndEngine
package org.anddev.andengine.opengl.texture.region; import org.anddev.andengine.opengl.texture.Texture; import org.anddev.andengine.opengl.texture.source.AssetTextureSource; import org.anddev.andengine.opengl.texture.source.ITextureSource; import org.anddev.andengine.opengl.texture.source.ResourceTextureSource; import android.content.Context; /** * @author Nicolas Gramlich * @since 18:15:14 - 09.03.2010 */ public class TextureRegionFactory { // =========================================================== // Constants // =========================================================== private static String sAssetBasePath = ""; // =========================================================== // Usability-Methods // =========================================================== public static void setAssetBasePath(final String pAssetBasePath) { TextureRegionFactory.sAssetBasePath = pAssetBasePath; } // =========================================================== // Extraction // =========================================================== public static TextureRegion extractFromTexture(final Texture pTexture, final int pTexturePositionX, final int pTexturePositionY, final int pWidth, final int pHeight) { return new TextureRegion(pTexture, pTexturePositionX, pTexturePositionY, pWidth, pHeight); } // =========================================================== // From Asset // =========================================================== public static TextureRegion createFromAsset(final Texture pTexture, final Context pContext, final String pAssetPath, final int pTexturePositionX, final int pTexturePositionY) { final ITextureSource textureSource = new AssetTextureSource(pContext, TextureRegionFactory.sAssetBasePath + pAssetPath); return createFromSource(pTexture, textureSource, pTexturePositionX, pTexturePositionY); } public static TiledTextureRegion createTiledFromAsset(final Texture pTexture, final Context pContext, final String pAssetPath, final int pTexturePositionX, final int 
pTexturePositionY, final int pTileColumns, final int pTileRows) { final ITextureSource textureSource = new AssetTextureSource(pContext, TextureRegionFactory.sAssetBasePath + pAssetPath); return createTiledFromSource(pTexture, textureSource, pTexturePositionX, pTexturePositionY, pTileColumns, pTileRows); } // =========================================================== // From Resource // =========================================================== public static TextureRegion createFromResource(final Texture pTexture, final Context pContext, final int pDrawableResourceID, final int pTexturePositionX, final int pTexturePositionY) { final ITextureSource textureSource = new ResourceTextureSource(pContext, pDrawableResourceID); return createFromSource(pTexture, textureSource, pTexturePositionX, pTexturePositionY); } public static TiledTextureRegion createTiledFromResource(final Texture pTexture, final Context pContext, final int pDrawableResourceID, final int pTexturePositionX, final int pTexturePositionY, final int pTileColumns, final int pTileRows) { final ITextureSource textureSource = new ResourceTextureSource(pContext, pDrawableResourceID); return createTiledFromSource(pTexture, textureSource, pTexturePositionX, pTexturePositionY, pTileColumns, pTileRows); } // =========================================================== // Worker-Methods // =========================================================== public static TextureRegion createFromSource(final Texture pTexture, final ITextureSource pTextureSource, final int pTexturePositionX, final int pTexturePositionY) { final TextureRegion textureRegion = new TextureRegion(pTexture, pTexturePositionX, pTexturePositionY, pTextureSource.getWidth(), pTextureSource.getHeight()); pTexture.addTextureSource(pTextureSource, textureRegion.getTexturePositionX(), textureRegion.getTexturePositionY()); return textureRegion; } public static TiledTextureRegion createTiledFromSource(final Texture pTexture, final ITextureSource 
pTextureSource, final int pTexturePositionX, final int pTexturePositionY, final int pTileColumns, final int pTileRows) { final TiledTextureRegion tiledTextureRegion = new TiledTextureRegion(pTexture, pTexturePositionX, pTexturePositionY, pTextureSource.getWidth(), pTextureSource.getHeight(), pTileColumns, pTileRows); pTexture.addTextureSource(pTextureSource, tiledTextureRegion.getTexturePositionX(), tiledTextureRegion.getTexturePositionY()); return tiledTextureRegion; } }
src/org/anddev/andengine/opengl/texture/region/TextureRegionFactory.java
package org.anddev.andengine.opengl.texture.region; import org.anddev.andengine.opengl.texture.Texture; import org.anddev.andengine.opengl.texture.source.AssetTextureSource; import org.anddev.andengine.opengl.texture.source.ITextureSource; import org.anddev.andengine.opengl.texture.source.ResourceTextureSource; import android.content.Context; /** * @author Nicolas Gramlich * @since 18:15:14 - 09.03.2010 */ public class TextureRegionFactory { // =========================================================== // Constants // =========================================================== public static TextureRegion extractFromTexture(final Texture pTexture, int pTexturePositionX, int pTexturePositionY, int pWidth, int pHeight) { return new TextureRegion(pTexture, pTexturePositionX, pTexturePositionY, pWidth, pHeight); } // =========================================================== // From Asset // =========================================================== public static TextureRegion createFromAsset(final Texture pTexture, final Context pContext, final String pAssetPath, final int pTexturePositionX, final int pTexturePositionY) { final ITextureSource textureSource = new AssetTextureSource(pContext, pAssetPath); return createFromSource(pTexture, textureSource, pTexturePositionX, pTexturePositionY); } public static TiledTextureRegion createTiledFromAsset(final Texture pTexture, final Context pContext, final String pAssetPath, final int pTexturePositionX, final int pTexturePositionY, final int pTileColumns, final int pTileRows) { final ITextureSource textureSource = new AssetTextureSource(pContext, pAssetPath); return createTiledFromSource(pTexture, textureSource, pTexturePositionX, pTexturePositionY, pTileColumns, pTileRows); } // =========================================================== // From Resource // =========================================================== public static TextureRegion createFromResource(final Texture pTexture, final Context pContext, final int 
pDrawableResourceID, final int pTexturePositionX, final int pTexturePositionY) { final ITextureSource textureSource = new ResourceTextureSource(pContext, pDrawableResourceID); return createFromSource(pTexture, textureSource, pTexturePositionX, pTexturePositionY); } public static TiledTextureRegion createTiledFromResource(final Texture pTexture, final Context pContext, final int pDrawableResourceID, final int pTexturePositionX, final int pTexturePositionY, final int pTileColumns, final int pTileRows) { final ITextureSource textureSource = new ResourceTextureSource(pContext, pDrawableResourceID); return createTiledFromSource(pTexture, textureSource, pTexturePositionX, pTexturePositionY, pTileColumns, pTileRows); } // =========================================================== // Worker-Methods // =========================================================== public static TextureRegion createFromSource(final Texture pTexture, final ITextureSource pTextureSource, final int pTexturePositionX, final int pTexturePositionY) { final TextureRegion textureRegion = new TextureRegion(pTexture, pTexturePositionX, pTexturePositionY, pTextureSource.getWidth(), pTextureSource.getHeight()); pTexture.addTextureSource(pTextureSource, textureRegion.getTexturePositionX(), textureRegion.getTexturePositionY()); return textureRegion; } public static TiledTextureRegion createTiledFromSource(final Texture pTexture, final ITextureSource pTextureSource, final int pTexturePositionX, final int pTexturePositionY, final int pTileColumns, final int pTileRows) { final TiledTextureRegion tiledTextureRegion = new TiledTextureRegion(pTexture, pTexturePositionX, pTexturePositionY, pTextureSource.getWidth(), pTextureSource.getHeight(), pTileColumns, pTileRows); pTexture.addTextureSource(pTextureSource, tiledTextureRegion.getTexturePositionX(), tiledTextureRegion.getTexturePositionY()); return tiledTextureRegion; } }
Added TextureRegion "extraction" method to TextureRegionFactory. Added BaseAssetPath to TextureRegionFactory.
src/org/anddev/andengine/opengl/texture/region/TextureRegionFactory.java
Added TextureRegion "extraction" method to TextureRegionFactory. Added BaseAssetPath to TextureRegionFactory.
<ide><path>rc/org/anddev/andengine/opengl/texture/region/TextureRegionFactory.java <ide> package org.anddev.andengine.opengl.texture.region; <add> <ide> <ide> import org.anddev.andengine.opengl.texture.Texture; <ide> import org.anddev.andengine.opengl.texture.source.AssetTextureSource; <ide> // =========================================================== <ide> // Constants <ide> // =========================================================== <del> <del> public static TextureRegion extractFromTexture(final Texture pTexture, int pTexturePositionX, int pTexturePositionY, int pWidth, int pHeight) { <add> <add> private static String sAssetBasePath = ""; <add> <add> // =========================================================== <add> // Usability-Methods <add> // =========================================================== <add> <add> public static void setAssetBasePath(final String pAssetBasePath) { <add> TextureRegionFactory.sAssetBasePath = pAssetBasePath; <add> } <add> <add> // =========================================================== <add> // Extraction <add> // =========================================================== <add> <add> public static TextureRegion extractFromTexture(final Texture pTexture, final int pTexturePositionX, final int pTexturePositionY, final int pWidth, final int pHeight) { <ide> return new TextureRegion(pTexture, pTexturePositionX, pTexturePositionY, pWidth, pHeight); <ide> } <ide> <ide> // =========================================================== <ide> <ide> public static TextureRegion createFromAsset(final Texture pTexture, final Context pContext, final String pAssetPath, final int pTexturePositionX, final int pTexturePositionY) { <del> final ITextureSource textureSource = new AssetTextureSource(pContext, pAssetPath); <add> final ITextureSource textureSource = new AssetTextureSource(pContext, TextureRegionFactory.sAssetBasePath + pAssetPath); <ide> return createFromSource(pTexture, textureSource, pTexturePositionX, pTexturePositionY); 
<ide> } <ide> <ide> public static TiledTextureRegion createTiledFromAsset(final Texture pTexture, final Context pContext, final String pAssetPath, final int pTexturePositionX, final int pTexturePositionY, final int pTileColumns, final int pTileRows) { <del> final ITextureSource textureSource = new AssetTextureSource(pContext, pAssetPath); <add> final ITextureSource textureSource = new AssetTextureSource(pContext, TextureRegionFactory.sAssetBasePath + pAssetPath); <ide> return createTiledFromSource(pTexture, textureSource, pTexturePositionX, pTexturePositionY, pTileColumns, pTileRows); <ide> } <ide>
Java
apache-2.0
763a5760c9e856f18dfd5a6a0a3ba03b6348302e
0
supriyantomaftuh/java-client-api,marklogic/java-client-api,marklogic/java-client-api,omkarudipi/java-client-api,omkarudipi/java-client-api,omkarudipi/java-client-api,marklogic/java-client-api,marklogic/java-client-api,supriyantomaftuh/java-client-api,marklogic/java-client-api,supriyantomaftuh/java-client-api,grechaw/java-client-api,omkarudipi/java-client-api,supriyantomaftuh/java-client-api,grechaw/java-client-api
/* * Copyright 2012 MarkLogic Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.marklogic.client.impl; import java.io.InputStream; import java.io.PrintStream; import java.io.Reader; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLException; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import com.marklogic.client.config.DeleteQueryDefinition; import org.apache.http.auth.AuthScope; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.auth.params.AuthPNames; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.params.AuthPolicy; import org.apache.http.client.params.ClientPNames; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.conn.scheme.SchemeSocketFactory; import org.apache.http.conn.ssl.AbstractVerifier; import org.apache.http.conn.ssl.SSLSocketFactory; import org.apache.http.conn.ssl.X509HostnameVerifier; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpParams; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
com.marklogic.client.ContentDescriptor; import com.marklogic.client.DatabaseClientFactory.Authentication; import com.marklogic.client.DatabaseClientFactory.SSLHostnameVerifier; import com.marklogic.client.DocumentDescriptor; import com.marklogic.client.DocumentManager.Metadata; import com.marklogic.client.ElementLocator; import com.marklogic.client.FailedRequestException; import com.marklogic.client.ForbiddenUserException; import com.marklogic.client.Format; import com.marklogic.client.KeyLocator; import com.marklogic.client.MarkLogicInternalException; import com.marklogic.client.QueryManager; import com.marklogic.client.RequestLogger; import com.marklogic.client.RequestParameters; import com.marklogic.client.ResourceNotFoundException; import com.marklogic.client.ValueLocator; import com.marklogic.client.config.KeyValueQueryDefinition; import com.marklogic.client.config.QueryDefinition; import com.marklogic.client.config.StringQueryDefinition; import com.marklogic.client.config.StructuredQueryDefinition; import com.marklogic.client.config.ValuesDefinition; import com.marklogic.client.config.ValuesListDefinition; import com.marklogic.client.io.OutputStreamSender; import com.marklogic.client.io.marker.AbstractReadHandle; import com.marklogic.client.io.marker.AbstractWriteHandle; import com.marklogic.client.io.marker.DocumentMetadataReadHandle; import com.marklogic.client.io.marker.DocumentMetadataWriteHandle; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter; import com.sun.jersey.api.client.filter.HTTPDigestAuthFilter; import com.sun.jersey.client.apache4.ApacheHttpClient4; import com.sun.jersey.client.apache4.config.ApacheHttpClient4Config; import com.sun.jersey.client.apache4.config.DefaultApacheHttpClient4Config; import com.sun.jersey.core.util.MultivaluedMapImpl; import com.sun.jersey.multipart.BodyPart; import 
com.sun.jersey.multipart.Boundary; import com.sun.jersey.multipart.MultiPart; import com.sun.jersey.multipart.MultiPartMediaTypes; public class JerseyServices implements RESTServices { static final private Logger logger = LoggerFactory .getLogger(JerseyServices.class); static final String ERROR_NS = "http://marklogic.com/rest-api"; protected class HostnameVerifierAdapter extends AbstractVerifier { private SSLHostnameVerifier verifier; protected HostnameVerifierAdapter(SSLHostnameVerifier verifier) { super(); this.verifier = verifier; } @Override public void verify(String hostname, String[] cns, String[] subjectAlts) throws SSLException { verifier.verify(hostname, cns, subjectAlts); } } private ApacheHttpClient4 client; private WebResource connection; private boolean isFirstRequest = true; public JerseyServices() { } private FailedRequest extractErrorFields(ClientResponse response) { InputStream is = response.getEntityInputStream(); try { FailedRequest handler = new FailedRequest(is); return handler; } catch (RuntimeException e) { throw (e); } finally { response.close(); } } @Override public void connect(String host, int port, String user, String password, Authentication type, SSLContext context, SSLHostnameVerifier verifier) { X509HostnameVerifier x509Verifier = null; if (verifier == null) ; else if (verifier == SSLHostnameVerifier.ANY) x509Verifier = SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER; else if (verifier == SSLHostnameVerifier.COMMON) x509Verifier = SSLSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER; else if (verifier == SSLHostnameVerifier.STRICT) x509Verifier = SSLSocketFactory.STRICT_HOSTNAME_VERIFIER; else if (context != null && verifier != null) x509Verifier = new HostnameVerifierAdapter(verifier); else if (context != null) x509Verifier = SSLSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER; else if (verifier != null) throw new IllegalArgumentException( "Null SSLContent but non-null SSLHostnameVerifier for client"); connect(host, port, user, 
password, type, context, x509Verifier); } private void connect(String host, int port, String user, String password, Authentication type, SSLContext context, X509HostnameVerifier verifier) { if (logger.isInfoEnabled()) logger.info("Connecting to {} at {} as {}", new Object[] { host, port, user }); if (host == null) throw new IllegalArgumentException("No host provided"); if (user == null) throw new IllegalArgumentException("No user provided"); if (password == null) throw new IllegalArgumentException("No password provided"); if (type == null) { if (context != null) { type = Authentication.BASIC; } else { throw new IllegalArgumentException( "No authentication type provided"); } } if (connection != null) connection = null; if (client != null) { client.destroy(); client = null; } // TODO: integrated control of HTTP Client and Jersey Client logging System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.SimpleLog"); System.setProperty( "org.apache.commons.logging.simplelog.log.httpclient.wire.header", "warn"); System.setProperty( "org.apache.commons.logging.simplelog.log.org.apache.commons.httpclient", "warn"); Scheme scheme = null; if (context == null) { SchemeSocketFactory socketFactory = PlainSocketFactory .getSocketFactory(); scheme = new Scheme("http", port, socketFactory); } else { SSLSocketFactory socketFactory = new SSLSocketFactory(context, verifier); scheme = new Scheme("https", port, socketFactory); } SchemeRegistry schemeRegistry = new SchemeRegistry(); schemeRegistry.register(scheme); ThreadSafeClientConnManager connMgr = new ThreadSafeClientConnManager( schemeRegistry); connMgr.setDefaultMaxPerRoute(100); CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); credentialsProvider.setCredentials(new AuthScope(host, port), new UsernamePasswordCredentials(user, password)); List<String> authpref = new ArrayList<String>(); if (type == Authentication.BASIC) authpref.add(AuthPolicy.BASIC); else if (type == 
Authentication.DIGEST)
			authpref.add(AuthPolicy.DIGEST);
		else
			throw new MarkLogicInternalException(
					"Internal error - unknown authentication type: "
							+ type.name());

		HttpParams httpParams = new BasicHttpParams();
		httpParams.setParameter(AuthPNames.PROXY_AUTH_PREF, authpref);
		// note that setting PROPERTY_FOLLOW_REDIRECTS below doesn't seem to
		// work
		httpParams.setBooleanParameter(ClientPNames.HANDLE_REDIRECTS, false);

		DefaultApacheHttpClient4Config config = new DefaultApacheHttpClient4Config();
		Map<String, Object> configProps = config.getProperties();
		configProps
				.put(ApacheHttpClient4Config.PROPERTY_PREEMPTIVE_BASIC_AUTHENTICATION,
						false);
		configProps.put(ApacheHttpClient4Config.PROPERTY_CONNECTION_MANAGER,
				connMgr);
		configProps.put(ApacheHttpClient4Config.PROPERTY_FOLLOW_REDIRECTS,
				false);
		// configProps.put(ApacheHttpClient4Config.PROPERTY_CREDENTIALS_PROVIDER,
		// credentialsProvider);
		configProps.put(ApacheHttpClient4Config.PROPERTY_HTTP_PARAMS,
				httpParams);
		// configProps.put(ApacheHttpClient4Config.PROPERTY_CHUNKED_ENCODING_SIZE,
		// 0);

		client = ApacheHttpClient4.create(config);

		// System.setProperty("javax.net.debug", "all"); // all or ssl

		// authentication is handled by a Jersey filter, not the credentials
		// provider above
		if (type == Authentication.BASIC)
			client.addFilter(new HTTPBasicAuthFilter(user, password));
		else if (type == Authentication.DIGEST)
			client.addFilter(new HTTPDigestAuthFilter(user, password));
		else
			throw new MarkLogicInternalException(
					"Internal error - unknown authentication type: "
							+ type.name());

		connection = client.resource(((context == null) ? "http" : "https")
				+ "://" + host + ":" + port + "/v1/");
	}

	/** Releases the client and clears connection state; safe to call when already released. */
	@Override
	public void release() {
		if (client == null)
			return;

		if (logger.isInfoEnabled())
			logger.info("Releasing connection");

		connection = null;
		// client.getClientHandler().getHttpClient().getConnectionManager().shutdown();
		client.destroy();
		client = null;
		isFirstRequest = true;
	}

	// warm up the connection (completing authentication) before a request
	// whose body cannot be replayed
	private void makeFirstRequest() {
		connection.path("ping").head();
	}

	/**
	 * Deletes a document, optionally within a transaction, sending any
	 * known content version as an If-Match header for optimistic locking.
	 */
	@Override
	public void deleteDocument(RequestLogger reqlog, DocumentDescriptor desc,
			String transactionId, Set<Metadata> categories)
			throws ResourceNotFoundException, ForbiddenUserException,
			FailedRequestException {
		String uri = desc.getUri();
		if (uri == null)
			throw new IllegalArgumentException(
					"Document delete for document identifier without uri");

		if (logger.isInfoEnabled())
			logger.info("Deleting {} in transaction {}", uri, transactionId);

		WebResource webResource = makeDocumentResource(makeDocumentParams(uri,
				categories, transactionId, null));

		WebResource.Builder builder = addVersionHeader(desc,
				webResource.getRequestBuilder(), "If-Match");

		ClientResponse response = builder.delete(ClientResponse.class);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.NOT_FOUND) {
			response.close();
			throw new ResourceNotFoundException(
					"Could not delete non-existent document");
		}
		if (status == ClientResponse.Status.FORBIDDEN) {
			// TODO: inspect response structure to distinguish from insufficient privilege
			if (desc instanceof DocumentDescriptorImpl
					&& ((DocumentDescriptorImpl) desc).isInternal() == false
					&& desc.getVersion() == DocumentDescriptor.UNKNOWN_VERSION)
				throw new FailedRequestException(
						"Content version required to delete document",
						extractErrorFields(response));
			throw new ForbiddenUserException(
					"User is not allowed to delete documents",
					extractErrorFields(response));
		}
		if (status == ClientResponse.Status.PRECONDITION_FAILED) {
			response.close();
			throw new
FailedRequestException(
					"Content version must match to delete document");
		}
		if (status != ClientResponse.Status.NO_CONTENT)
			throw new FailedRequestException("delete failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		response.close();

		logRequest(reqlog, "deleted %s document", uri);
	}

	/**
	 * Reads content and/or metadata for a document, dispatching to the
	 * multipart or single-part implementation depending on which handles
	 * were supplied; returns false when neither handle was usable.
	 */
	@Override
	public boolean getDocument(RequestLogger reqlog, DocumentDescriptor desc,
			String transactionId, Set<Metadata> categories,
			RequestParameters extraParams,
			DocumentMetadataReadHandle metadataHandle,
			AbstractReadHandle contentHandle) throws ResourceNotFoundException,
			ForbiddenUserException, FailedRequestException {
		HandleImplementation metadataBase = HandleAccessor.checkHandle(
				metadataHandle, "metadata");
		HandleImplementation contentBase = HandleAccessor.checkHandle(
				contentHandle, "content");

		String metadataFormat = null;
		String metadataMimetype = null;
		if (metadataBase != null) {
			metadataFormat = metadataBase.getFormat().toString().toLowerCase();
			metadataMimetype = metadataBase.getMimetype();
		}

		String contentMimetype = null;
		if (contentBase != null) {
			contentMimetype = contentBase.getMimetype();
		}

		if (metadataBase != null && contentBase != null) {
			// both requested: fetch metadata and content in one multipart trip
			return getDocumentImpl(reqlog, desc, transactionId, categories,
					extraParams, metadataFormat, metadataHandle, contentHandle);
		} else if (metadataBase != null) {
			return getDocumentImpl(reqlog, desc, transactionId, categories,
					extraParams, metadataMimetype, metadataHandle);
		} else if (contentBase != null) {
			return getDocumentImpl(reqlog, desc, transactionId, null,
					extraParams, contentMimetype, contentHandle);
		}

		return false;
	}

	/**
	 * Single-part GET of content or metadata; returns false on 304 Not
	 * Modified (conditional read via an If-None-Match version header).
	 */
	private boolean getDocumentImpl(RequestLogger reqlog,
			DocumentDescriptor desc, String transactionId,
			Set<Metadata> categories, RequestParameters extraParams,
			String mimetype, AbstractReadHandle handle)
			throws ResourceNotFoundException, ForbiddenUserException,
			FailedRequestException {
		String uri = desc.getUri();
		if (uri == null)
			throw new IllegalArgumentException(
					"Document read for document identifier without uri");

		if (logger.isInfoEnabled())
			logger.info("Getting {} in transaction {}", uri, transactionId);

		WebResource.Builder builder = makeDocumentResource(
				makeDocumentParams(uri, categories, transactionId, extraParams))
				.accept(mimetype);
		// "range" travels as an HTTP header, not a query parameter
		if (extraParams != null && extraParams.containsKey("range"))
			builder = builder.header("range", extraParams.get("range").get(0));

		builder = addVersionHeader(desc, builder, "If-None-Match");

		ClientResponse response = builder.get(ClientResponse.class);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.NOT_FOUND)
			throw new ResourceNotFoundException(
					"Could not read non-existent document",
					extractErrorFields(response));
		if (status == ClientResponse.Status.FORBIDDEN)
			throw new ForbiddenUserException(
					"User is not allowed to read documents",
					extractErrorFields(response));
		if (status == ClientResponse.Status.NOT_MODIFIED) {
			response.close();
			return false;
		}
		if (status != ClientResponse.Status.OK
				&& status != ClientResponse.Status.PARTIAL_CONTENT)
			throw new FailedRequestException("read failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		logRequest(
				reqlog,
				"read %s document from %s transaction with %s mime type and %s metadata categories",
				uri, (transactionId != null) ? transactionId : "no",
				(mimetype != null) ? mimetype : "no",
				stringJoin(categories, ", ", "no"));

		HandleImplementation handleBase = HandleAccessor.as(handle);

		MultivaluedMap<String, String> responseHeaders = response.getHeaders();
		if (isExternalDescriptor(desc)) {
			// refresh the caller's descriptor and mirror it into the handle
			updateVersion(desc, responseHeaders);
			updateDescriptor(desc, responseHeaders);
			copyDescriptor(desc, handleBase);
		} else {
			updateDescriptor(handleBase, responseHeaders);
		}

		Class as = handleBase.receiveAs();
		Object entity = response.getEntity(as);

		// streams are handed to the caller, who becomes responsible for closing
		if (as != InputStream.class && as != Reader.class)
			response.close();

		handleBase.receiveContent(
				(reqlog != null) ?
reqlog.copyContent(entity) : entity);

		return true;
	}

	/**
	 * Multipart GET of metadata and content in one round trip; returns
	 * false on 304 Not Modified or an empty/missing multipart body.
	 */
	private boolean getDocumentImpl(RequestLogger reqlog,
			DocumentDescriptor desc, String transactionId,
			Set<Metadata> categories, RequestParameters extraParams,
			String metadataFormat, DocumentMetadataReadHandle metadataHandle,
			AbstractReadHandle contentHandle) throws ResourceNotFoundException,
			ForbiddenUserException, FailedRequestException {
		String uri = desc.getUri();
		if (uri == null)
			throw new IllegalArgumentException(
					"Document read for document identifier without uri");

		if (logger.isInfoEnabled())
			logger.info("Getting multipart for {} in transaction {}", uri,
					transactionId);

		MultivaluedMap<String, String> docParams = makeDocumentParams(uri,
				categories, transactionId, extraParams, true);
		docParams.add("format", metadataFormat);

		WebResource.Builder builder = makeDocumentResource(docParams).accept(
				Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE));

		builder = addVersionHeader(desc, builder, "If-None-Match");

		ClientResponse response = builder.get(ClientResponse.class);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.NOT_FOUND)
			throw new ResourceNotFoundException(
					"Could not read non-existent document",
					extractErrorFields(response));
		if (status == ClientResponse.Status.FORBIDDEN)
			throw new ForbiddenUserException(
					"User is not allowed to read documents",
					extractErrorFields(response));
		if (status == ClientResponse.Status.NOT_MODIFIED) {
			response.close();
			return false;
		}
		if (status != ClientResponse.Status.OK)
			throw new FailedRequestException("read failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		logRequest(
				reqlog,
				"read %s document from %s transaction with %s metadata categories and content",
				uri, (transactionId != null) ? transactionId : "no",
				stringJoin(categories, ", ", "no"));

		MultiPart entity = response.getEntity(MultiPart.class);
		if (entity == null)
			return false;

		List<BodyPart> partList = entity.getBodyParts();
		if (partList == null)
			return false;

		int partCount = partList.size();
		if (partCount == 0)
			return false;
		if (partCount != 2)
			throw new FailedRequestException("read expected 2 parts but got "
					+ partCount + " parts");

		HandleImplementation metadataBase = HandleAccessor.as(metadataHandle);
		HandleImplementation contentBase = HandleAccessor.as(contentHandle);

		// part 0 carries metadata, part 1 carries content
		BodyPart contentPart = partList.get(1);

		MultivaluedMap<String, String> responseHeaders = response.getHeaders();
		MultivaluedMap<String, String> contentHeaders = contentPart
				.getHeaders();
		if (isExternalDescriptor(desc)) {
			updateVersion(desc, responseHeaders);
			updateFormat(desc, responseHeaders);
			updateMimetype(desc, contentHeaders);
			// length of the whole multipart is not the content part's length
			desc.setByteLength(ContentDescriptor.UNKNOWN_LENGTH);
			copyDescriptor(desc, contentBase);
		} else if (contentBase != null) {
			updateFormat(contentBase, responseHeaders);
			updateMimetype(contentBase, contentHeaders);
			contentBase.setByteLength(ContentDescriptor.UNKNOWN_LENGTH);
		}

		metadataBase.receiveContent(
				partList.get(0).getEntityAs(metadataBase.receiveAs())
				);

		Object contentEntity = contentPart.getEntityAs(
				contentBase.receiveAs());
		contentBase.receiveContent(
				(reqlog != null) ? reqlog.copyContent(contentEntity)
						: contentEntity);

		response.close();

		return true;
	}

	/**
	 * HEAD request to check document existence; returns a descriptor
	 * refreshed from the response headers, or null when the document
	 * does not exist.
	 */
	@Override
	public DocumentDescriptor head(RequestLogger reqlog, String uri,
			String transactionId) throws ForbiddenUserException,
			FailedRequestException {
		if (uri == null)
			throw new IllegalArgumentException(
					"Existence check for document identifier without uri");

		if (logger.isInfoEnabled())
			logger.info("Requesting head for {} in transaction {}", uri,
					transactionId);

		WebResource webResource = makeDocumentResource(makeDocumentParams(uri,
				null, transactionId, null));

		ClientResponse response = webResource.head();

		MultivaluedMap<String, String> responseHeaders = response.getHeaders();

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status != ClientResponse.Status.OK) {
			if (status == ClientResponse.Status.NOT_FOUND) {
				response.close();
				return null;
			} else if (status == ClientResponse.Status.FORBIDDEN)
				throw new ForbiddenUserException(
						"User is not allowed to check the existence of documents",
						extractErrorFields(response));
			else
				throw new FailedRequestException(
						"Document existence check failed: "
								+ status.getReasonPhrase(),
						extractErrorFields(response));
		}

		response.close();
		logRequest(reqlog, "checked %s document from %s transaction", uri,
				(transactionId != null) ?
transactionId : "no");

		DocumentDescriptorImpl desc = new DocumentDescriptorImpl(uri, false);
		updateVersion(desc, responseHeaders);
		updateDescriptor(desc, responseHeaders);

		return desc;
	}

	/**
	 * Writes content and/or metadata for a document, dispatching to the
	 * multipart or single-part implementation depending on which handles
	 * were supplied.
	 */
	@Override
	public void putDocument(RequestLogger reqlog, DocumentDescriptor desc,
			String transactionId, Set<Metadata> categories,
			RequestParameters extraParams,
			DocumentMetadataWriteHandle metadataHandle,
			AbstractWriteHandle contentHandle)
			throws ResourceNotFoundException, ForbiddenUserException,
			FailedRequestException {
		HandleImplementation metadataBase = HandleAccessor.checkHandle(
				metadataHandle, "metadata");
		HandleImplementation contentBase = HandleAccessor.checkHandle(
				contentHandle, "content");

		String metadataMimetype = null;
		if (metadataBase != null) {
			metadataMimetype = metadataBase.getMimetype();
		}

		// a descriptor with a known format overrides the handle's mimetype
		Format descFormat = desc.getFormat();
		String contentMimetype = (descFormat != null && descFormat != Format.UNKNOWN) ? desc
				.getMimetype() : null;
		if (contentMimetype == null && contentBase != null) {
			Format contentFormat = contentBase.getFormat();
			if (descFormat != null && descFormat != contentFormat) {
				contentMimetype = descFormat.getDefaultMimetype();
			} else if (contentFormat != null && contentFormat != Format.UNKNOWN) {
				contentMimetype = contentBase.getMimetype();
			}
		}

		if (metadataBase != null && contentBase != null) {
			putDocumentImpl(reqlog, desc, transactionId, categories,
					extraParams, metadataMimetype, metadataHandle,
					contentMimetype, contentHandle);
		} else if (metadataBase != null) {
			putDocumentImpl(reqlog, desc, transactionId, categories,
					extraParams, metadataMimetype, metadataHandle);
		} else if (contentBase != null) {
			putDocumentImpl(reqlog, desc, transactionId, null, extraParams,
					contentMimetype, contentHandle);
		}
	}

	/**
	 * Single-part PUT of content or metadata; streaming bodies
	 * (OutputStreamSender / InputStream / Reader) trigger a warm-up
	 * request so authentication does not consume the stream.
	 */
	private void putDocumentImpl(RequestLogger reqlog, DocumentDescriptor desc,
			String transactionId, Set<Metadata> categories,
			RequestParameters extraParams, String mimetype,
			AbstractWriteHandle handle) throws ResourceNotFoundException,
			ForbiddenUserException, FailedRequestException {
		String uri = desc.getUri();
		if (uri == null)
			throw new IllegalArgumentException(
					"Document write for document identifier without uri");

		Object value = HandleAccessor.as(handle).sendContent();
		if (value == null)
			throw new IllegalArgumentException(
					"Document write with null value for " + uri);

		if (logger.isInfoEnabled())
			logger.info("Putting {} in transaction {}", uri, transactionId);

		logRequest(
				reqlog,
				"writing %s document from %s transaction with %s mime type and %s metadata categories",
				uri, (transactionId != null) ? transactionId : "no",
				(mimetype != null) ? mimetype : "no",
				stringJoin(categories, ", ", "no"));

		WebResource webResource = makeDocumentResource(makeDocumentParams(uri,
				categories, transactionId, extraParams));
		WebResource.Builder builder = webResource
				.type((mimetype != null) ? mimetype : MediaType.WILDCARD);
		builder = addVersionHeader(desc, builder, "If-Match");

		ClientResponse response = null;
		if (value instanceof OutputStreamSender) {
			// non-replayable body: authenticate first so the stream is sent once
			if (isFirstRequest)
				makeFirstRequest();

			response = builder
					.put(ClientResponse.class, new StreamingOutputImpl(
							(OutputStreamSender) value, reqlog));

			if (isFirstRequest)
				isFirstRequest = false;
		} else {
			if (isFirstRequest
					&& (value instanceof InputStream || value instanceof Reader))
				makeFirstRequest();

			if (reqlog != null)
				response = builder.put(ClientResponse.class,
						reqlog.copyContent(value));
			else
				response = builder.put(ClientResponse.class, value);

			if (isFirstRequest)
				isFirstRequest = false;
		}

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.NOT_FOUND)
			throw new ResourceNotFoundException(
					"Could not write non-existent document",
					extractErrorFields(response));
		if (status == ClientResponse.Status.FORBIDDEN) {
			// a missing version on an external descriptor suggests the server
			// requires optimistic locking rather than a privilege problem
			if (desc instanceof DocumentDescriptorImpl
					&& ((DocumentDescriptorImpl) desc).isInternal() == false
					&& desc.getVersion() == DocumentDescriptor.UNKNOWN_VERSION)
				throw new FailedRequestException(
						"Content version required to write document",
						extractErrorFields(response));
			throw new ForbiddenUserException(
					"User is not allowed to write documents",
					extractErrorFields(response));
		}
		if (status == ClientResponse.Status.PRECONDITION_FAILED)
			throw new FailedRequestException(
					"Content version must match to write document",
					extractErrorFields(response));
		if (status != ClientResponse.Status.CREATED
				&& status != ClientResponse.Status.NO_CONTENT)
			throw new FailedRequestException("write failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		response.close();
	}

	/**
	 * Multipart PUT of metadata and content in one round trip.
	 */
	private void putDocumentImpl(RequestLogger reqlog, DocumentDescriptor desc,
			String transactionId, Set<Metadata> categories,
			RequestParameters extraParams, String metadataMimetype,
			DocumentMetadataWriteHandle metadataHandle, String contentMimetype,
			AbstractWriteHandle contentHandle) throws ResourceNotFoundException,
			ForbiddenUserException, FailedRequestException {
		String uri = desc.getUri();
		if (uri == null)
			throw new IllegalArgumentException(
					"Document write for document identifier without uri");

		if (logger.isInfoEnabled())
			logger.info("Putting multipart for {} in transaction {}", uri,
					transactionId);

		logRequest(
				reqlog,
				"writing %s document from %s transaction with %s metadata categories and content",
				uri, (transactionId != null) ? transactionId : "no",
				stringJoin(categories, ", ", "no"));

		boolean hasStreamingPart = false;

		MultiPart multiPart = new MultiPart();
		multiPart.setMediaType(new MediaType("multipart", "mixed"));
		// part 0 carries metadata, part 1 carries content
		for (int i = 0; i < 2; i++) {
			String mimetype = null;
			Object value = null;
			if (i == 0) {
				mimetype = metadataMimetype;
				value = HandleAccessor.as(metadataHandle).sendContent();
			} else {
				mimetype = contentMimetype;
				value = HandleAccessor.as(contentHandle).sendContent();
			}

			String[] typeParts = (mimetype != null && mimetype.contains("/")) ? mimetype
					.split("/", 2) : null;

			MediaType typePart = (typeParts != null) ?
new MediaType(typeParts[0],
					typeParts[1]) : MediaType.WILDCARD_TYPE;

			BodyPart bodyPart = null;
			if (value instanceof OutputStreamSender) {
				hasStreamingPart = true;
				bodyPart = new BodyPart(new StreamingOutputImpl(
						(OutputStreamSender) value, reqlog), typePart);
			} else {
				if (value instanceof InputStream || value instanceof Reader)
					hasStreamingPart = true;

				if (reqlog != null)
					bodyPart = new BodyPart(reqlog.copyContent(value), typePart);
				else
					bodyPart = new BodyPart(value, typePart);
			}

			multiPart = multiPart.bodyPart(bodyPart);
		}

		MultivaluedMap<String, String> docParams = makeDocumentParams(uri,
				categories, transactionId, extraParams, true);

		// non-replayable parts: authenticate first so each stream is sent once
		if (isFirstRequest && hasStreamingPart)
			makeFirstRequest();

		WebResource.Builder builder = makeDocumentResource(docParams).type(
				Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE)
				);
		builder = addVersionHeader(desc, builder, "If-Match");

		ClientResponse response = builder.put(ClientResponse.class, multiPart);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.NOT_FOUND) {
			response.close();
			throw new ResourceNotFoundException(
					"Could not write non-existent document");
		}
		if (status == ClientResponse.Status.FORBIDDEN) {
			// TODO: inspect response structure to distinguish from insufficient privilege
			if (desc instanceof DocumentDescriptorImpl
					&& ((DocumentDescriptorImpl) desc).isInternal() == false
					&& desc.getVersion() == DocumentDescriptor.UNKNOWN_VERSION)
				throw new FailedRequestException(
						"Content version required to write document",
						extractErrorFields(response));
			throw new ForbiddenUserException(
					"User is not allowed to write documents",
					extractErrorFields(response));
		}
		if (status == ClientResponse.Status.PRECONDITION_FAILED) {
			response.close();
			throw new FailedRequestException(
					"Content version must match to write document");
		}
		if (status != ClientResponse.Status.CREATED
				&& status != ClientResponse.Status.NO_CONTENT)
			throw new FailedRequestException("write failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		response.close();
	}

	/**
	 * Opens a server transaction, optionally named and time-limited;
	 * returns the transaction id parsed from the Location header of the
	 * 303 See Other response.
	 */
	@Override
	public String openTransaction(String name, int timeLimit)
			throws ForbiddenUserException, FailedRequestException {
		if (logger.isInfoEnabled())
			logger.info("Opening transaction");

		MultivaluedMap<String, String> transParams = null;
		if (name != null || timeLimit > 0) {
			transParams = new MultivaluedMapImpl();
			if (name != null)
				transParams.add("name", name);
			if (timeLimit > 0)
				transParams.add("timeLimit", String.valueOf(timeLimit));
		}

		WebResource resource = (transParams != null) ? connection.path(
				"transactions").queryParams(transParams) : connection
				.path("transactions");

		ClientResponse response = resource.post(ClientResponse.class);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN)
			throw new ForbiddenUserException(
					"User is not allowed to open transactions",
					extractErrorFields(response));
		if (status != ClientResponse.Status.SEE_OTHER)
			throw new FailedRequestException("transaction open failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		String location = response.getHeaders().getFirst("Location");
		response.close();

		if (location == null)
			throw new MarkLogicInternalException(
					"transaction open failed to provide location");
		if (!location.contains("/"))
			throw new MarkLogicInternalException(
					"transaction open produced invalid location " + location);

		// the transaction id is the last path segment of the Location header
		return location.substring(location.lastIndexOf("/") + 1);
	}

	@Override
	public void commitTransaction(String transactionId)
			throws ForbiddenUserException, FailedRequestException {
		completeTransaction(transactionId, "commit");
	}

	@Override
	public void rollbackTransaction(String transactionId)
			throws ForbiddenUserException, FailedRequestException {
		completeTransaction(transactionId, "rollback");
	}

	/** POSTs result=commit|rollback to finish an open transaction. */
	private void completeTransaction(String transactionId, String result)
			throws ForbiddenUserException, FailedRequestException {
		if (result == null)
			throw new MarkLogicInternalException(
					"transaction completion without operation");
		if (transactionId == null)
			throw new MarkLogicInternalException(
					"transaction completion without id: " + result);

		if (logger.isInfoEnabled())
			logger.info("Completing transaction {} with {}", transactionId,
					result);

		MultivaluedMap<String, String> transParams = new MultivaluedMapImpl();
		transParams.add("result", result);

		ClientResponse response = connection
				.path("transactions/" + transactionId).queryParams(transParams)
				.post(ClientResponse.class);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN)
			throw new ForbiddenUserException(
					"User is not allowed to complete transaction with "
							+ result, extractErrorFields(response));
		if (status != ClientResponse.Status.NO_CONTENT)
			throw new FailedRequestException("transaction " + result
					+ " failed: " + status.getReasonPhrase(),
					extractErrorFields(response));

		response.close();
	}

	private MultivaluedMap<String, String> makeDocumentParams(String uri,
			Set<Metadata> categories, String transactionId,
			RequestParameters extraParams) {
		return makeDocumentParams(uri, categories, transactionId, extraParams,
				false);
	}

	/**
	 * Builds the query parameters for /v1/documents from the uri, metadata
	 * categories, transaction id, and extra parameters (the "range" extra
	 * parameter is excluded because it is sent as an HTTP header).
	 */
	private MultivaluedMap<String, String> makeDocumentParams(String uri,
			Set<Metadata> categories, String transactionId,
			RequestParameters extraParams, boolean withContent) {
		MultivaluedMap<String, String> docParams = new MultivaluedMapImpl();
		if (extraParams != null && extraParams.size() > 0) {
			for (Map.Entry<String, List<String>> entry : extraParams
					.entrySet()) {
				String extraKey = entry.getKey();
				if (!"range".equalsIgnoreCase(extraKey))
					docParams.put(extraKey, entry.getValue());
			}
		}
		docParams.add("uri", uri);
		if (categories == null || categories.size() == 0) {
			docParams.add("category", "content");
		} else {
			if (withContent)
				docParams.add("category", "content");
			if
(categories.contains(Metadata.ALL)) {
				for (String category : new String[] { "collections",
						"permissions", "properties", "quality" })
					docParams.add("category", category);
			} else {
				for (Metadata category : categories)
					docParams.add("category", category.name().toLowerCase());
			}
		}
		if (transactionId != null)
			docParams.add("txid", transactionId);
		return docParams;
	}

	/** Resource for /v1/documents with the supplied query parameters. */
	private WebResource makeDocumentResource(
			MultivaluedMap<String, String> queryParams) {
		return connection.path("documents").queryParams(queryParams);
	}

	// true for caller-supplied descriptors, whose fields should be refreshed
	// from response headers
	private boolean isExternalDescriptor(ContentDescriptor desc) {
		return desc != null && desc instanceof DocumentDescriptorImpl
				&& !((DocumentDescriptorImpl) desc).isInternal();
	}

	/** Refreshes format, mimetype, and length from response headers. */
	private void updateDescriptor(ContentDescriptor desc,
			MultivaluedMap<String, String> headers) {
		if (desc == null || headers == null)
			return;

		updateFormat(desc, headers);
		updateMimetype(desc, headers);
		updateLength(desc, headers);
	}

	/** Mirrors a descriptor's content fields into a handle. */
	private void copyDescriptor(DocumentDescriptor desc,
			HandleImplementation handleBase) {
		if (handleBase == null)
			return;

		handleBase.setFormat(desc.getFormat());
		handleBase.setMimetype(desc.getMimetype());
		handleBase.setByteLength(desc.getByteLength());
	}

	// reads the vnd.marklogic.document-format response header
	private void updateFormat(ContentDescriptor descriptor,
			MultivaluedMap<String, String> headers) {
		if (headers.containsKey("vnd.marklogic.document-format")) {
			List<String> values = headers.get("vnd.marklogic.document-format");
			if (values != null) {
				Format format = Format.valueOf(values.get(0).toUpperCase());
				if (format != null) {
					descriptor.setFormat(format);
				}
			}
		}
	}

	// reads the Content-Type response header, stripping any parameters
	private void updateMimetype(ContentDescriptor descriptor,
			MultivaluedMap<String, String> headers) {
		if (headers.containsKey("Content-Type")) {
			List<String> values = headers.get("Content-Type");
			if (values != null) {
				String contentType = values.get(0);
				String mimetype = contentType.contains(";") ? contentType
						.substring(0, contentType.indexOf(";")) : contentType;
				// TODO: if "; charset=foo" set character set
				if (mimetype != null && mimetype.length() > 0) {
					descriptor.setMimetype(mimetype);
				}
			}
		}
	}

	// reads the Content-Length response header, defaulting to UNKNOWN_LENGTH
	private void updateLength(ContentDescriptor descriptor,
			MultivaluedMap<String, String> headers) {
		long length = ContentDescriptor.UNKNOWN_LENGTH;
		if (headers.containsKey("Content-Length")) {
			List<String> values = headers.get("Content-Length");
			if (values != null) {
				length = Long.valueOf(values.get(0));
			}
		}
		descriptor.setByteLength(length);
	}

	// reads the ETag response header as the document's numeric version
	// NOTE(review): assumes the server sends a bare numeric ETag; a quoted
	// ETag would make Long.valueOf() throw — confirm against the server
	private void updateVersion(DocumentDescriptor descriptor,
			MultivaluedMap<String, String> headers) {
		long version = DocumentDescriptor.UNKNOWN_VERSION;
		if (headers.containsKey("ETag")) {
			List<String> values = headers.get("ETag");
			if (values != null) {
				version = Long.valueOf(values.get(0));
			}
		}
		descriptor.setVersion(version);
	}

	// adds If-Match / If-None-Match only for external descriptors with a
	// known version
	private WebResource.Builder addVersionHeader(DocumentDescriptor desc,
			WebResource.Builder builder, String name) {
		if (desc != null && desc instanceof DocumentDescriptorImpl
				&& !((DocumentDescriptorImpl) desc).isInternal()) {
			long version = desc.getVersion();
			if (version != DocumentDescriptor.UNKNOWN_VERSION) {
				return builder.header(name, String.valueOf(version));
			}
		}
		return builder;
	}

	/**
	 * Runs a search with paging and view selection, dispatching on the
	 * concrete QueryDefinition type; returns the response entity as the
	 * requested class.
	 */
	@Override
	public <T> T search(Class<T> as, QueryDefinition queryDef, String mimetype,
			long start, long len, QueryManager.ResponseViews views,
			String transactionId) throws ForbiddenUserException,
			FailedRequestException {
		RequestParameters params = new RequestParameters();
		ClientResponse response = null;

		if (start > 1) {
			params.put("start", "" + start);
		}

		if (len > 0) {
			params.put("pageLength", "" + len);
		}

		for (QueryManager.QueryView view : views) {
			if (view == QueryManager.QueryView.SEARCH) {
				params.put("view", "search");
			} else if (view == QueryManager.QueryView.FACETS) {
				params.put("view", "facets");
			} else if (view == QueryManager.QueryView.METRICS) {
				params.put("view", "metrics");
			}
		}

		if (queryDef.getDirectory() != null)
{
			params.put("directory", queryDef.getDirectory());
		}

		for (String collection : queryDef.getCollections()) {
			params.put("collection", collection);
		}

		if (transactionId != null) {
			params.put("txid", transactionId);
		}

		String optionsName = queryDef.getOptionsName();
		if (optionsName != null && optionsName.length() > 0) {
			params.put("options", optionsName);
		}

		if (queryDef instanceof StringQueryDefinition) {
			String text = ((StringQueryDefinition) queryDef).getCriteria();
			if (logger.isInfoEnabled())
				logger.info("Searching for {} in transaction {}", text,
						transactionId);

			if (text != null) {
				params.put("q", text);
			}

			response = connection.path("search")
					.queryParams(((RequestParametersImplementation) params).getMapImpl())
					.accept(mimetype).get(ClientResponse.class);

			if (isFirstRequest)
				isFirstRequest = false;
		} else if (queryDef instanceof KeyValueQueryDefinition) {
			Map<ValueLocator, String> pairs = ((KeyValueQueryDefinition) queryDef);
			if (logger.isInfoEnabled())
				logger.info("Searching for keys/values in transaction {}",
						transactionId);

			for (ValueLocator loc : pairs.keySet()) {
				if (loc instanceof KeyLocator) {
					params.put("key", ((KeyLocator) loc).getKey());
				} else {
					ElementLocator eloc = (ElementLocator) loc;
					params.put("element", eloc.getElement().toString());
					if (eloc.getAttribute() != null) {
						params.put("attribute", eloc.getAttribute().toString());
					}
				}
				params.put("value", pairs.get(loc));
			}

			response = connection.path("keyvalue")
					.queryParams(((RequestParametersImplementation) params).getMapImpl())
					.accept(mimetype).get(ClientResponse.class);

			if (isFirstRequest)
				isFirstRequest = false;
		} else if (queryDef instanceof StructuredQueryDefinition) {
			String structure = ((StructuredQueryDefinition) queryDef)
					.serialize();

			// structured queries are POSTed as the XML request body
			response = connection.path("search").queryParams(((RequestParametersImplementation) params).getMapImpl())
					.type("application/xml")
					.post(ClientResponse.class, structure);

			isFirstRequest = false;
		} else if (queryDef instanceof DeleteQueryDefinition) {
			if (logger.isInfoEnabled())
				logger.info("Searching for deletes in transaction {}",
						transactionId);

			response = connection.path("search")
					.queryParams(((RequestParametersImplementation) params).getMapImpl())
					.accept(mimetype).get(ClientResponse.class);

			isFirstRequest = false;
		} else {
			throw new UnsupportedOperationException("Cannot search with "
					+ queryDef.getClass().getName());
		}

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN) {
			throw new ForbiddenUserException("User is not allowed to search",
					extractErrorFields(response));
		}
		if (status != ClientResponse.Status.OK) {
			throw new FailedRequestException("search failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
		}

		T entity = response.getEntity(as);
		// streams are handed to the caller, who becomes responsible for closing
		if (as != InputStream.class && as != Reader.class)
			response.close();
		return entity;
	}

	/**
	 * Deletes all documents matching the delete query, optionally scoped
	 * by directory, collections, and transaction.
	 */
	@Override
	public void deleteSearch(DeleteQueryDefinition queryDef,
			String transactionId) throws ForbiddenUserException,
			FailedRequestException {
		RequestParameters params = new RequestParameters();
		ClientResponse response = null;

		if (queryDef.getDirectory() != null) {
			params.put("directory", queryDef.getDirectory());
		}

		for (String collection : queryDef.getCollections()) {
			params.put("collection", collection);
		}

		if (transactionId != null) {
			params.put("txid", transactionId);
		}

		response = connection.path("search")
				.queryParams(((RequestParametersImplementation) params).getMapImpl())
				.delete(ClientResponse.class);

		isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN) {
			throw new ForbiddenUserException("User is not allowed to delete",
					extractErrorFields(response));
		}

		if (status != ClientResponse.Status.NO_CONTENT) {
			throw new FailedRequestException("delete failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
		}
	}

	/**
	 * Retrieves lexicon values (optionally aggregated) as the requested
	 * class.
	 */
	@Override
	public <T> T values(Class<T> as, ValuesDefinition valDef, String mimetype,
			String transactionId)
throws ForbiddenUserException, FailedRequestException {
		MultivaluedMap<String, String> docParams = new MultivaluedMapImpl();
		ClientResponse response = null;

		String optionsName = valDef.getOptionsName();
		if (optionsName != null && optionsName.length() > 0) {
			docParams.add("options", optionsName);
		}

		if (valDef.getAggregate() != null) {
			docParams.add("aggregate", valDef.getAggregate());
		}

		if (valDef.getAggregatePath() != null) {
			docParams.add("aggregatePath", valDef.getAggregatePath());
		}

		if (valDef.getView() != null) {
			docParams.add("view", valDef.getView());
		}

		if (valDef.getDirection() != null) {
			if (valDef.getDirection() == ValuesDefinition.Direction.ASCENDING) {
				docParams.add("direction", "ascending");
			} else {
				docParams.add("direction", "descending");
			}
		}

		if (valDef.getFrequency() != null) {
			if (valDef.getFrequency() == ValuesDefinition.Frequency.FRAGMENT) {
				docParams.add("frequency", "fragment");
			} else {
				docParams.add("frequency", "item");
			}
		}

		if (transactionId != null) {
			docParams.add("txid", transactionId);
		}

		// a named lexicon is addressed as values/{name}
		String uri = "values";
		if (valDef.getName() != null) {
			uri += "/" + valDef.getName();
		}

		response = connection.path(uri).queryParams(docParams).accept(mimetype)
				.get(ClientResponse.class);

		isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN) {
			throw new ForbiddenUserException("User is not allowed to search",
					extractErrorFields(response));
		}

		if (status != ClientResponse.Status.OK) {
			throw new FailedRequestException("search failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
		}

		T entity = response.getEntity(as);
		// streams are handed to the caller, who becomes responsible for closing
		if (as != InputStream.class && as != Reader.class)
			response.close();
		return entity;
	}

	/** Lists the available value lexicons. */
	@Override
	public <T> T valuesList(Class<T> as, ValuesListDefinition valDef,
			String mimetype, String transactionId)
			throws ForbiddenUserException, FailedRequestException {
		MultivaluedMap<String, String> docParams = new MultivaluedMapImpl();
		ClientResponse response = null;

		String optionsName = valDef.getOptionsName();
		if (optionsName != null && optionsName.length() > 0) {
			docParams.add("options", optionsName);
		}

		if (transactionId != null) {
			docParams.add("txid", transactionId);
		}

		String uri = "values";

		response = connection.path(uri).queryParams(docParams).accept(mimetype)
				.get(ClientResponse.class);

		isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN) {
			throw new ForbiddenUserException("User is not allowed to search",
					extractErrorFields(response));
		}

		if (status != ClientResponse.Status.OK) {
			throw new FailedRequestException("search failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
		}

		T entity = response.getEntity(as);
		if (as != InputStream.class && as != Reader.class)
			response.close();
		return entity;
	}

	/** Lists the named query options stored on the server (config/query). */
	@Override
	public <T> T optionsList(Class<T> as, String mimetype, String transactionId)
			throws ForbiddenUserException, FailedRequestException {
		MultivaluedMap<String, String> docParams = new MultivaluedMapImpl();
		ClientResponse response = null;

		if (transactionId != null) {
			docParams.add("txid", transactionId);
		}

		String uri = "config/query";

		response = connection.path(uri).queryParams(docParams).accept(mimetype)
				.get(ClientResponse.class);

		isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN) {
			throw new ForbiddenUserException("User is not allowed to search",
					extractErrorFields(response));
		}

		if (status != ClientResponse.Status.OK) {
			throw new FailedRequestException("search failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
		}

		T entity = response.getEntity(as);
		if (as != InputStream.class && as != Reader.class)
			response.close();
		return entity;
	}

	// namespaces, search options etc.
@Override public <T> T getValue(RequestLogger reqlog, String type, String key, String mimetype, Class<T> as) throws ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Getting {}/{}", type, key); ClientResponse response = connection.path(type + "/" + key) .accept(mimetype).get(ClientResponse.class); if (isFirstRequest) isFirstRequest = false; ClientResponse.Status status = response.getClientResponseStatus(); if (status != ClientResponse.Status.OK) { if (status == ClientResponse.Status.NOT_FOUND) { response.close(); return null; } else if (status == ClientResponse.Status.FORBIDDEN) throw new ForbiddenUserException("User is not allowed to read " + type, extractErrorFields(response)); else throw new FailedRequestException(type + " read failed: " + status.getReasonPhrase(), extractErrorFields(response)); } logRequest(reqlog, "read %s value with %s key and %s mime type", type, key, (mimetype != null) ? mimetype : null); T entity = response.getEntity(as); if (as != InputStream.class && as != Reader.class) response.close(); return (reqlog != null) ? reqlog.copyContent(entity) : entity; } @Override public <T> T getValues(RequestLogger reqlog, String type, String mimetype, Class<T> as) throws ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Getting {}", type); ClientResponse response = connection.path(type).accept(mimetype) .get(ClientResponse.class); if (isFirstRequest) isFirstRequest = false; ClientResponse.Status status = response.getClientResponseStatus(); if (status == ClientResponse.Status.FORBIDDEN) { throw new ForbiddenUserException("User is not allowed to read " + type, extractErrorFields(response)); } if (status != ClientResponse.Status.OK) { throw new FailedRequestException(type + " read failed: " + status.getReasonPhrase(), extractErrorFields(response)); } logRequest(reqlog, "read %s values with %s mime type", type, (mimetype != null) ? 
mimetype : null); T entity = response.getEntity(as); if (as != InputStream.class && as != Reader.class) response.close(); return (reqlog != null) ? reqlog.copyContent(entity) : entity; } @Override public void putValues(RequestLogger reqlog, String type, String mimetype, Object value) throws ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Posting {}", type); putPostValueImpl(reqlog, "put", type, null, null, mimetype, value, ClientResponse.Status.NO_CONTENT); } @Override public void postValues(RequestLogger reqlog, String type, String mimetype, Object value) throws ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Posting {}", type); putPostValueImpl(reqlog, "post", type, null, null, mimetype, value, ClientResponse.Status.NO_CONTENT); } @Override public void postValue(RequestLogger reqlog, String type, String key, String mimetype, Object value) throws ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Posting {}/{}", type, key); putPostValueImpl(reqlog, "post", type, key, null, mimetype, value, ClientResponse.Status.CREATED); } @Override public void putValue(RequestLogger reqlog, String type, String key, String mimetype, Object value) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Putting {}/{}", type, key); putPostValueImpl(reqlog, "put", type, key, null, mimetype, value, ClientResponse.Status.NO_CONTENT, ClientResponse.Status.CREATED); } @Override public void putValue(RequestLogger reqlog, String type, String key, RequestParameters extraParams, String mimetype, Object value) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Putting {}/{}", type, key); putPostValueImpl(reqlog, "put", type, key, extraParams, mimetype, value, ClientResponse.Status.NO_CONTENT); } private void 
putPostValueImpl(RequestLogger reqlog, String method, String type, String key, RequestParameters extraParams, String mimetype, Object value, ClientResponse.Status... expectedStatuses) { if (key != null) { logRequest(reqlog, "writing %s value with %s key and %s mime type", type, key, (mimetype != null) ? mimetype : null); } else { logRequest(reqlog, "writing %s values with %s mime type", type, (mimetype != null) ? mimetype : null); } boolean hasStreamingPart = false; Object sentValue = null; if (value instanceof OutputStreamSender) { hasStreamingPart = true; sentValue = new StreamingOutputImpl((OutputStreamSender) value, reqlog); } else { if (value instanceof InputStream || value instanceof Reader) hasStreamingPart = true; if (reqlog != null) sentValue = reqlog.copyContent(value); else sentValue = value; } MultivaluedMap<String, String> requestParams = convertParams(extraParams); ClientResponse response = null; if ("put".equals(method)) { if (isFirstRequest && hasStreamingPart) makeFirstRequest(); String connectPath = (key != null) ? type + "/" + key : type; WebResource resource = (requestParams == null) ? connection .path(connectPath) : connection.path(connectPath) .queryParams(requestParams); response = resource.type(mimetype).put(ClientResponse.class, sentValue); if (isFirstRequest) isFirstRequest = false; } else if ("post".equals(method)) { if (isFirstRequest && hasStreamingPart) makeFirstRequest(); WebResource resource = (requestParams == null) ? 
connection .path(type) : connection.path(type).queryParams( requestParams); response = resource.type(mimetype).post(ClientResponse.class, sentValue); if (isFirstRequest) isFirstRequest = false; } else { throw new MarkLogicInternalException("unknown method type " + method); } ClientResponse.Status status = response.getClientResponseStatus(); if (status == ClientResponse.Status.FORBIDDEN) throw new ForbiddenUserException("User is not allowed to write " + type, extractErrorFields(response)); if (status == ClientResponse.Status.NOT_FOUND) throw new ResourceNotFoundException(type + " not found for write", extractErrorFields(response)); boolean statusOk = false; for (ClientResponse.Status expectedStatus : expectedStatuses) { statusOk = statusOk || (status == expectedStatus); if (statusOk) { break; } } if (!statusOk) { throw new FailedRequestException(type + " write failed: " + status.getReasonPhrase(), extractErrorFields(response)); } response.close(); } @Override public void deleteValue(RequestLogger reqlog, String type, String key) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Deleting {}/{}", type, key); ClientResponse response = connection.path(type + "/" + key).delete( ClientResponse.class); if (isFirstRequest) isFirstRequest = false; ClientResponse.Status status = response.getClientResponseStatus(); if (status == ClientResponse.Status.FORBIDDEN) throw new ForbiddenUserException("User is not allowed to delete " + type, extractErrorFields(response)); if (status == ClientResponse.Status.NOT_FOUND) throw new ResourceNotFoundException(type + " not found for delete", extractErrorFields(response)); if (status != ClientResponse.Status.NO_CONTENT) throw new FailedRequestException("delete failed: " + status.getReasonPhrase(), extractErrorFields(response)); response.close(); logRequest(reqlog, "deleted %s value with %s key", type, key); } @Override public void deleteValues(RequestLogger reqlog, 
String type) throws ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Deleting {}", type); ClientResponse response = connection.path(type).delete( ClientResponse.class); if (isFirstRequest) isFirstRequest = false; ClientResponse.Status status = response.getClientResponseStatus(); if (status == ClientResponse.Status.FORBIDDEN) throw new ForbiddenUserException("User is not allowed to delete " + type, extractErrorFields(response)); if (status != ClientResponse.Status.NO_CONTENT) throw new FailedRequestException("delete failed: " + status.getReasonPhrase(), extractErrorFields(response)); response.close(); logRequest(reqlog, "deleted %s values", type); } @Override public <T> T getResource(RequestLogger reqlog, String path, RequestParameters params, String mimetype, Class<T> as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doGet(path, params, mimetype); checkStatus(response, "read", "resource", path, (as != null) ? ResponseStatus.OK : ResponseStatus.NO_CONTENT); return makeResult(reqlog, "read", "resource", response, as); } @Override public Object[] getResource(RequestLogger reqlog, String path, RequestParameters params, String[] mimetypes, Class[] as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doGet(path, params, Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE)); checkStatus(response, "read", "resource", path, (as != null && as.length > 0) ? 
ResponseStatus.OK : ResponseStatus.NO_CONTENT); return makeResults(reqlog, "read", "resource", response, as); } @Override public <T> T putResource(RequestLogger reqlog, String path, RequestParameters params, String inputMimetype, Object value, String outputMimetype, Class<T> as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPut(reqlog, path, params, inputMimetype, value, outputMimetype); checkStatus(response, "write", "resource", path, (as != null) ? ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResult(reqlog, "write", "resource", response, as); } @Override public <T> T putResource(RequestLogger reqlog, String path, RequestParameters params, String[] inputMimetypes, Object[] values, String outputMimetype, Class<T> as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPut(reqlog, path, params, inputMimetypes, values, outputMimetype); checkStatus(response, "write", "resource", path, (as != null) ? ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResult(reqlog, "write", "resource", response, as); } @Override public Object postResource(RequestLogger reqlog, String path, RequestParameters params, String inputMimetype, Object value, String outputMimetype, Class as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPost(reqlog, path, params, inputMimetype, value, outputMimetype); checkStatus(response, "apply", "resource", path, (as != null) ? 
ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResult(reqlog, "apply", "resource", response, as); } @Override public Object postResource(RequestLogger reqlog, String path, RequestParameters params, String[] inputMimetypes, Object[] values, String outputMimetype, Class as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPost(reqlog, path, params, inputMimetypes, values, outputMimetype); checkStatus(response, "apply", "resource", path, (as != null) ? ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResult(reqlog, "apply", "resource", response, as); } @Override public Object[] postResource(RequestLogger reqlog, String path, RequestParameters params, String inputMimetype, Object value, String[] outputMimetypes, Class[] as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPost(reqlog, path, params, inputMimetype, value, Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE)); checkStatus(response, "apply", "resource", path, (as != null && as.length > 0) ? ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResults(reqlog, "apply", "resource", response, as); } @Override public Object[] postResource(RequestLogger reqlog, String path, RequestParameters params, String[] inputMimetypes, Object[] values, String[] outputMimetypes, Class[] as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPost(reqlog, path, params, inputMimetypes, values, Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE)); checkStatus(response, "apply", "resource", path, (as != null && as.length > 0) ? 
ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResults(reqlog, "apply", "resource", response, as); } @Override public <T> T deleteResource(RequestLogger reqlog, String path, RequestParameters params, String outputMimetype, Class<T> as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doDelete(reqlog, path, params, outputMimetype); checkStatus(response, "delete", "resource", path, (as != null) ? ResponseStatus.OK : ResponseStatus.NO_CONTENT); return makeResult(reqlog, "delete", "resource", response, as); } private ClientResponse doGet(String path, RequestParameters params, Object mimetype) { if (path == null) throw new IllegalArgumentException("Read with null path"); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), null, mimetype); if (logger.isInfoEnabled()) logger.info("Getting {}", path); ClientResponse response = builder.get(ClientResponse.class); if (isFirstRequest) isFirstRequest = false; return response; } private ClientResponse doPut(RequestLogger reqlog, String path, RequestParameters params, Object inputMimetype, Object value, String outputMimetype) { if (path == null) throw new IllegalArgumentException("Write with null path"); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), inputMimetype, outputMimetype); if (logger.isInfoEnabled()) logger.info("Putting {}", path); ClientResponse response = null; if (value instanceof OutputStreamSender) { if (isFirstRequest) makeFirstRequest(); response = builder .put(ClientResponse.class, new StreamingOutputImpl( (OutputStreamSender) value, reqlog)); } else { if (isFirstRequest && (value instanceof InputStream || value instanceof Reader)) makeFirstRequest(); if (reqlog != null) response = builder.put(ClientResponse.class, reqlog.copyContent(value)); else response = builder.put(ClientResponse.class, value); } if (isFirstRequest) 
isFirstRequest = false; return response; } private ClientResponse doPut(RequestLogger reqlog, String path, RequestParameters params, String[] inputMimetypes, Object[] values, String outputMimetype) { if (path == null) throw new IllegalArgumentException("Write with null path"); MultiPart multiPart = new MultiPart(); boolean hasStreamingPart = addParts(reqlog, multiPart, inputMimetypes, values); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE), outputMimetype); if (logger.isInfoEnabled()) logger.info("Putting multipart for {}", path); if (isFirstRequest && hasStreamingPart) makeFirstRequest(); ClientResponse response = builder.put(ClientResponse.class, multiPart); if (isFirstRequest) isFirstRequest = false; return response; } private ClientResponse doPost(RequestLogger reqlog, String path, RequestParameters params, Object inputMimetype, Object value, Object outputMimetype) { if (path == null) throw new IllegalArgumentException("Apply with null path"); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), inputMimetype, outputMimetype); if (logger.isInfoEnabled()) logger.info("Posting {}", path); ClientResponse response = null; if (value instanceof OutputStreamSender) { if (isFirstRequest) makeFirstRequest(); response = builder .post(ClientResponse.class, new StreamingOutputImpl( (OutputStreamSender) value, reqlog)); } else { if (isFirstRequest && (value instanceof InputStream || value instanceof Reader)) makeFirstRequest(); if (reqlog != null) response = builder.post(ClientResponse.class, reqlog.copyContent(value)); else response = builder.post(ClientResponse.class, value); } if (isFirstRequest) isFirstRequest = false; return response; } private ClientResponse doPost(RequestLogger reqlog, String path, RequestParameters params, String[] inputMimetypes, Object[] values, Object outputMimetype) { if 
(path == null) throw new IllegalArgumentException("Apply with null path"); MultiPart multiPart = new MultiPart(); boolean hasStreamingPart = addParts(reqlog, multiPart, inputMimetypes, values); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE), outputMimetype); if (logger.isInfoEnabled()) logger.info("Posting multipart for {}", path); if (isFirstRequest && hasStreamingPart) makeFirstRequest(); ClientResponse response = builder.post(ClientResponse.class, multiPart); if (isFirstRequest) isFirstRequest = false; return response; } private ClientResponse doDelete(RequestLogger reqlog, String path, RequestParameters params, String mimetype) { if (path == null) throw new IllegalArgumentException("Delete with null path"); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), null, mimetype); if (logger.isInfoEnabled()) logger.info("Deleting {}", path); ClientResponse response = builder.delete(ClientResponse.class); if (isFirstRequest) isFirstRequest = false; return response; } private MultivaluedMap<String, String> convertParams( RequestParameters params) { if (params == null || params.size() == 0) return null; MultivaluedMap<String, String> requestParams = new MultivaluedMapImpl(); requestParams.putAll(params); return requestParams; } private boolean addParts(RequestLogger reqlog, MultiPart multiPart, String[] mimetypes, Object[] values) { if (mimetypes == null || mimetypes.length == 0) throw new IllegalArgumentException( "mime types not specified for multipart"); if (values == null || values.length == 0) throw new IllegalArgumentException( "values not specified for multipart"); if (mimetypes.length != values.length) throw new IllegalArgumentException( "mistmatch between mime types and values for multipart"); multiPart.setMediaType(new MediaType("multipart", "mixed")); boolean hasStreamingPart = false; for 
(int i = 0; i < mimetypes.length; i++) { if (mimetypes[i] == null) throw new IllegalArgumentException("null mimetype: " + i); String[] typeParts = mimetypes[i].contains("/") ? mimetypes[i] .split("/", 2) : null; MediaType typePart = (typeParts != null) ? new MediaType( typeParts[0], typeParts[1]) : MediaType.WILDCARD_TYPE; BodyPart bodyPart = null; if (values[i] instanceof OutputStreamSender) { hasStreamingPart = true; bodyPart = new BodyPart(new StreamingOutputImpl( (OutputStreamSender) values[i], reqlog), typePart); } else { if (values[i] instanceof InputStream || values[i] instanceof Reader) hasStreamingPart = true; if (reqlog != null) bodyPart = new BodyPart(reqlog.copyContent(values[i]), typePart); else bodyPart = new BodyPart(values[i], typePart); } multiPart = multiPart.bodyPart(bodyPart); } return hasStreamingPart; } private WebResource.Builder makeBuilder(String path, MultivaluedMap<String, String> params, Object inputMimetype, Object outputMimetype) { WebResource resource = (params == null) ? 
connection.path(path) : connection.path(path).queryParams(params); WebResource.Builder builder = resource.getRequestBuilder(); if (inputMimetype == null) { } else if (inputMimetype instanceof String) { builder = builder.type((String) inputMimetype); } else if (inputMimetype instanceof MediaType) { builder = builder.type((MediaType) inputMimetype); } else { throw new IllegalArgumentException( "Unknown input mimetype specifier " + inputMimetype.getClass().getName()); } if (outputMimetype == null) { } else if (outputMimetype instanceof String) { builder = builder.accept((String) outputMimetype); } else if (outputMimetype instanceof MediaType) { builder = builder.accept((MediaType) outputMimetype); } else { throw new IllegalArgumentException( "Unknown output mimetype specifier " + outputMimetype.getClass().getName()); } return builder; } private void checkStatus(ClientResponse response, String operation, String entityType, String path, ResponseStatus expected) { ClientResponse.Status status = response.getClientResponseStatus(); if (!expected.isExpected(status)) { response.close(); if (status == ClientResponse.Status.NOT_FOUND) { throw new ResourceNotFoundException("Could not " + operation + " " + entityType + " at " + path, extractErrorFields(response)); } if (status == ClientResponse.Status.FORBIDDEN) { throw new ForbiddenUserException("User is not allowed to " + operation + " " + entityType + " at " + path, extractErrorFields(response)); } throw new FailedRequestException("failed to " + operation + " " + entityType + " at " + path + ": " + status.getReasonPhrase(), extractErrorFields(response)); } } private <T> T makeResult(RequestLogger reqlog, String operation, String entityType, ClientResponse response, Class<T> as) { if (as == null) return null; logRequest(reqlog, "%s for %s", operation, entityType); T entity = response.getEntity(as); if (as != InputStream.class && as != Reader.class) response.close(); return (reqlog != null) ? 
reqlog.copyContent(entity) : entity; } private Object[] makeResults(RequestLogger reqlog, String operation, String entityType, ClientResponse response, Class[] as) { if (as == null || as.length == 0) return null; logRequest(reqlog, "%s for %s", operation, entityType); MultiPart entity = response.getEntity(MultiPart.class); if (entity == null) return null; List<BodyPart> partList = entity.getBodyParts(); if (partList == null) return null; int partCount = partList.size(); if (partCount == 0) return null; if (partCount != as.length) throw new FailedRequestException("read expected " + as.length + " parts but got " + partCount + " parts"); Object[] parts = new Object[partCount]; for (int i = 0; i < partCount; i++) { Object part = partList.get(i).getEntityAs(as[i]); parts[i] = (reqlog != null) ? reqlog.copyContent(part) : part; } response.close(); return parts; } private void logRequest(RequestLogger reqlog, String message, Object... params) { if (reqlog == null) return; PrintStream out = reqlog.getPrintStream(); if (out == null) return; if (params == null || params.length == 0) { out.println(message); } else { out.format(message, params); out.println(); } } private String stringJoin(Collection collection, String separator, String defaultValue) { if (collection == null || collection.size() == 0) return defaultValue; StringBuilder builder = null; for (Object value : collection) { if (builder == null) builder = new StringBuilder(); else builder.append(separator); builder.append(value); } return (builder != null) ? builder.toString() : null; } // backdoors for testing public Client getClient() { return client; } public WebResource getConnection() { return connection; } }
src/main/java/com/marklogic/client/impl/JerseyServices.java
/* * Copyright 2012 MarkLogic Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.marklogic.client.impl; import java.io.InputStream; import java.io.PrintStream; import java.io.Reader; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLException; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import com.marklogic.client.config.DeleteQueryDefinition; import org.apache.http.auth.AuthScope; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.auth.params.AuthPNames; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.params.AuthPolicy; import org.apache.http.client.params.ClientPNames; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.conn.scheme.SchemeSocketFactory; import org.apache.http.conn.ssl.AbstractVerifier; import org.apache.http.conn.ssl.SSLSocketFactory; import org.apache.http.conn.ssl.X509HostnameVerifier; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpParams; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
com.marklogic.client.ContentDescriptor; import com.marklogic.client.DatabaseClientFactory.Authentication; import com.marklogic.client.DatabaseClientFactory.SSLHostnameVerifier; import com.marklogic.client.DocumentDescriptor; import com.marklogic.client.DocumentManager.Metadata; import com.marklogic.client.ElementLocator; import com.marklogic.client.FailedRequestException; import com.marklogic.client.ForbiddenUserException; import com.marklogic.client.Format; import com.marklogic.client.KeyLocator; import com.marklogic.client.MarkLogicInternalException; import com.marklogic.client.QueryManager; import com.marklogic.client.RequestLogger; import com.marklogic.client.RequestParameters; import com.marklogic.client.ResourceNotFoundException; import com.marklogic.client.ValueLocator; import com.marklogic.client.config.KeyValueQueryDefinition; import com.marklogic.client.config.QueryDefinition; import com.marklogic.client.config.StringQueryDefinition; import com.marklogic.client.config.StructuredQueryDefinition; import com.marklogic.client.config.ValuesDefinition; import com.marklogic.client.config.ValuesListDefinition; import com.marklogic.client.io.OutputStreamSender; import com.marklogic.client.io.marker.AbstractReadHandle; import com.marklogic.client.io.marker.AbstractWriteHandle; import com.marklogic.client.io.marker.DocumentMetadataReadHandle; import com.marklogic.client.io.marker.DocumentMetadataWriteHandle; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter; import com.sun.jersey.api.client.filter.HTTPDigestAuthFilter; import com.sun.jersey.client.apache4.ApacheHttpClient4; import com.sun.jersey.client.apache4.config.ApacheHttpClient4Config; import com.sun.jersey.client.apache4.config.DefaultApacheHttpClient4Config; import com.sun.jersey.core.util.MultivaluedMapImpl; import com.sun.jersey.multipart.BodyPart; import 
com.sun.jersey.multipart.Boundary; import com.sun.jersey.multipart.MultiPart; import com.sun.jersey.multipart.MultiPartMediaTypes; public class JerseyServices implements RESTServices { static final private Logger logger = LoggerFactory .getLogger(JerseyServices.class); static final String ERROR_NS = "http://marklogic.com/rest-api"; protected class HostnameVerifierAdapter extends AbstractVerifier { private SSLHostnameVerifier verifier; protected HostnameVerifierAdapter(SSLHostnameVerifier verifier) { super(); this.verifier = verifier; } @Override public void verify(String hostname, String[] cns, String[] subjectAlts) throws SSLException { verifier.verify(hostname, cns, subjectAlts); } } private ApacheHttpClient4 client; private WebResource connection; private boolean isFirstRequest = true; public JerseyServices() { } private FailedRequest extractErrorFields(ClientResponse response) { InputStream is = response.getEntityInputStream(); try { FailedRequest handler = new FailedRequest(is); return handler; } catch (RuntimeException e) { throw (e); } finally { response.close(); } } @Override public void connect(String host, int port, String user, String password, Authentication type, SSLContext context, SSLHostnameVerifier verifier) { X509HostnameVerifier x509Verifier = null; if (verifier == null) ; else if (verifier == SSLHostnameVerifier.ANY) x509Verifier = SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER; else if (verifier == SSLHostnameVerifier.COMMON) x509Verifier = SSLSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER; else if (verifier == SSLHostnameVerifier.STRICT) x509Verifier = SSLSocketFactory.STRICT_HOSTNAME_VERIFIER; else if (context != null && verifier != null) x509Verifier = new HostnameVerifierAdapter(verifier); else if (context != null) x509Verifier = SSLSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER; else if (verifier != null) throw new IllegalArgumentException( "Null SSLContent but non-null SSLHostnameVerifier for client"); connect(host, port, user, 
password, type, context, x509Verifier);
	}

	/**
	 * Builds the Jersey client and root WebResource for the REST server.
	 * Side effects: destroys any prior client, replaces the shared
	 * {@code client} and {@code connection} fields, and sets process-wide
	 * system properties for commons-logging.
	 */
	private void connect(String host, int port, String user, String password,
			Authentication type, SSLContext context,
			X509HostnameVerifier verifier) {
		if (logger.isInfoEnabled())
			logger.info("Connecting to {} at {} as {}", new Object[] { host,
					port, user });

		if (host == null)
			throw new IllegalArgumentException("No host provided");
		if (user == null)
			throw new IllegalArgumentException("No user provided");
		if (password == null)
			throw new IllegalArgumentException("No password provided");
		if (type == null) {
			// with an SSL context but no explicit auth type, default to BASIC
			if (context != null) {
				type = Authentication.BASIC;
			} else {
				throw new IllegalArgumentException(
						"No authentication type provided");
			}
		}

		// tear down any previous connection before building a new one
		if (connection != null)
			connection = null;
		if (client != null) {
			client.destroy();
			client = null;
		}

		// TODO: integrated control of HTTP Client and Jersey Client logging
		System.setProperty("org.apache.commons.logging.Log",
				"org.apache.commons.logging.impl.SimpleLog");
		System.setProperty(
				"org.apache.commons.logging.simplelog.log.httpclient.wire.header",
				"warn");
		System.setProperty(
				"org.apache.commons.logging.simplelog.log.org.apache.commons.httpclient",
				"warn");

		// no SSL context means plain HTTP; otherwise HTTPS with the caller's
		// context and hostname verifier
		Scheme scheme = null;
		if (context == null) {
			SchemeSocketFactory socketFactory = PlainSocketFactory
					.getSocketFactory();
			scheme = new Scheme("http", port, socketFactory);
		} else {
			SSLSocketFactory socketFactory = new SSLSocketFactory(context,
					verifier);
			scheme = new Scheme("https", port, socketFactory);
		}
		SchemeRegistry schemeRegistry = new SchemeRegistry();
		schemeRegistry.register(scheme);

		ThreadSafeClientConnManager connMgr = new ThreadSafeClientConnManager(
				schemeRegistry);
		connMgr.setDefaultMaxPerRoute(100);

		// NOTE(review): credentialsProvider is built but never registered with
		// the config (the PROPERTY_CREDENTIALS_PROVIDER line below is commented
		// out); auth actually happens via the Jersey filters added further down
		CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
		credentialsProvider.setCredentials(new AuthScope(host, port),
				new UsernamePasswordCredentials(user, password));

		List<String> authpref = new ArrayList<String>();
		if (type == Authentication.BASIC)
			authpref.add(AuthPolicy.BASIC);
		else if (type == Authentication.DIGEST)
			authpref.add(AuthPolicy.DIGEST);
		else
			throw new MarkLogicInternalException(
					"Internal error - unknown authentication type: "
							+ type.name());
		HttpParams httpParams = new BasicHttpParams();
		httpParams.setParameter(AuthPNames.PROXY_AUTH_PREF, authpref);

		// note that setting PROPERTY_FOLLOW_REDIRECTS below doesn't seem to
		// work
		httpParams.setBooleanParameter(ClientPNames.HANDLE_REDIRECTS, false);

		DefaultApacheHttpClient4Config config = new DefaultApacheHttpClient4Config();
		Map<String, Object> configProps = config.getProperties();
		configProps
				.put(ApacheHttpClient4Config.PROPERTY_PREEMPTIVE_BASIC_AUTHENTICATION,
						false);
		configProps.put(ApacheHttpClient4Config.PROPERTY_CONNECTION_MANAGER,
				connMgr);
		configProps.put(ApacheHttpClient4Config.PROPERTY_FOLLOW_REDIRECTS,
				false);
		// configProps.put(ApacheHttpClient4Config.PROPERTY_CREDENTIALS_PROVIDER,
		// credentialsProvider);
		configProps.put(ApacheHttpClient4Config.PROPERTY_HTTP_PARAMS,
				httpParams);
		// configProps.put(ApacheHttpClient4Config.PROPERTY_CHUNKED_ENCODING_SIZE,
		// 0);

		client = ApacheHttpClient4.create(config);

		// System.setProperty("javax.net.debug", "all"); // all or ssl

		if (type == Authentication.BASIC)
			client.addFilter(new HTTPBasicAuthFilter(user, password));
		else if (type == Authentication.DIGEST)
			client.addFilter(new HTTPDigestAuthFilter(user, password));
		else
			throw new MarkLogicInternalException(
					"Internal error - unknown authentication type: "
							+ type.name());

		connection = client.resource(((context == null) ? "http" : "https")
				+ "://" + host + ":" + port + "/v1/");
	}

	/**
	 * Releases the client and connection; safe to call when already released.
	 * Resets the first-request flag so a later connect starts fresh.
	 */
	@Override
	public void release() {
		if (client == null)
			return;

		if (logger.isInfoEnabled())
			logger.info("Releasing connection");

		connection = null;
		// client.getClientHandler().getHttpClient().getConnectionManager().shutdown();
		client.destroy();
		client = null;

		isFirstRequest = true;
	}

	// Primes the connection (e.g. completes auth negotiation) before sending
	// a request whose body can only be streamed once.
	private void makeFirstRequest() {
		connection.path("ping").head();
	}

	/**
	 * Deletes a document, optionally within a transaction.
	 * Maps 404 to ResourceNotFoundException, 403 to ForbiddenUserException
	 * (or a "version required" failure for external descriptors without a
	 * version), and 412 to a version-mismatch failure.
	 */
	@Override
	public void deleteDocument(RequestLogger reqlog, DocumentDescriptor desc,
			String transactionId, Set<Metadata> categories)
			throws ResourceNotFoundException, ForbiddenUserException,
			FailedRequestException {
		String uri = desc.getUri();
		if (uri == null)
			throw new IllegalArgumentException(
					"Document delete for document identifier without uri");

		if (logger.isInfoEnabled())
			logger.info("Deleting {} in transaction {}", uri, transactionId);

		WebResource webResource = makeDocumentResource(makeDocumentParams(uri,
				categories, transactionId, null));

		WebResource.Builder builder = addVersionHeader(desc,
				webResource.getRequestBuilder(), "If-Match");

		ClientResponse response = builder.delete(ClientResponse.class);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.NOT_FOUND) {
			response.close();
			throw new ResourceNotFoundException(
					"Could not delete non-existent document");
		}
		if (status == ClientResponse.Status.FORBIDDEN) {
			// TODO: inspect response structure to distinguish from insufficient privilege
			if (desc instanceof DocumentDescriptorImpl
					&& ((DocumentDescriptorImpl) desc).isInternal() == false
					&& desc.getVersion() == DocumentDescriptor.UNKNOWN_VERSION)
				throw new FailedRequestException("Content version required to delete document",
						extractErrorFields(response));
			throw new ForbiddenUserException("User is not allowed to delete documents",
					extractErrorFields(response));
		}
		if (status == ClientResponse.Status.PRECONDITION_FAILED) {
			response.close();
			throw new FailedRequestException(
					"Content version must match to delete document");
		}
		if (status != ClientResponse.Status.NO_CONTENT)
			throw new FailedRequestException("delete failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		response.close();

		logRequest(reqlog, "deleted %s document", uri);
	}

	/**
	 * Reads a document's metadata and/or content, dispatching to the
	 * multipart implementation when both handles are given, or the
	 * single-part implementation for either one alone.
	 * Returns false when neither handle was provided.
	 */
	@Override
	public boolean getDocument(RequestLogger reqlog, DocumentDescriptor desc,
			String transactionId, Set<Metadata> categories,
			RequestParameters extraParams,
			DocumentMetadataReadHandle metadataHandle,
			AbstractReadHandle contentHandle)
			throws ResourceNotFoundException, ForbiddenUserException,
			FailedRequestException {
		HandleImplementation metadataBase = HandleAccessor.checkHandle(
				metadataHandle, "metadata");
		HandleImplementation contentBase = HandleAccessor.checkHandle(
				contentHandle, "content");

		String metadataFormat = null;
		String metadataMimetype = null;
		if (metadataBase != null) {
			metadataFormat = metadataBase.getFormat().toString().toLowerCase();
			metadataMimetype = metadataBase.getMimetype();
		}

		String contentMimetype = null;
		if (contentBase != null) {
			contentMimetype = contentBase.getMimetype();
		}

		if (metadataBase != null && contentBase != null) {
			return getDocumentImpl(reqlog, desc, transactionId, categories,
					extraParams, metadataFormat, metadataHandle, contentHandle);
		} else if (metadataBase != null) {
			return getDocumentImpl(reqlog, desc, transactionId, categories,
					extraParams, metadataMimetype, metadataHandle);
		} else if (contentBase != null) {
			// content-only read ignores metadata categories
			return getDocumentImpl(reqlog, desc, transactionId, null,
					extraParams, contentMimetype, contentHandle);
		}

		return false;
	}

	/**
	 * Single-part document read into one handle. Returns false on 304
	 * (If-None-Match matched); maps 404/403 to exceptions. The response is
	 * deliberately left open when the handle receives a stream.
	 */
	private boolean getDocumentImpl(RequestLogger reqlog,
			DocumentDescriptor desc, String transactionId,
			Set<Metadata> categories, RequestParameters extraParams,
			String mimetype, AbstractReadHandle handle)
			throws ResourceNotFoundException, ForbiddenUserException,
			FailedRequestException {
		String uri = desc.getUri();
		if (uri == null)
			throw new IllegalArgumentException(
					"Document read for document identifier without uri");

		if (logger.isInfoEnabled())
			logger.info("Getting {} in transaction {}", uri, transactionId);

		WebResource.Builder builder = makeDocumentResource(
				makeDocumentParams(uri, categories, transactionId, extraParams))
				.accept(mimetype);
		// "range" travels as a header, not a query parameter
		if (extraParams != null && extraParams.containsKey("range"))
			builder = builder.header("range", extraParams.get("range").get(0));

		builder = addVersionHeader(desc, builder, "If-None-Match");

		ClientResponse response = builder.get(ClientResponse.class);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.NOT_FOUND)
			throw new ResourceNotFoundException(
					"Could not read non-existent document",
					extractErrorFields(response));
		if (status == ClientResponse.Status.FORBIDDEN)
			throw new ForbiddenUserException(
					"User is not allowed to read documents",
					extractErrorFields(response));
		if (status == ClientResponse.Status.NOT_MODIFIED) {
			response.close();
			return false;
		}
		if (status != ClientResponse.Status.OK
				&& status != ClientResponse.Status.PARTIAL_CONTENT)
			throw new FailedRequestException("read failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		logRequest(
				reqlog,
				"read %s document from %s transaction with %s mime type and %s metadata categories",
				uri, (transactionId != null) ? transactionId : "no",
				(mimetype != null) ? mimetype : "no",
				stringJoin(categories, ", ", "no"));

		HandleImplementation handleBase = HandleAccessor.as(handle);

		MultivaluedMap<String, String> responseHeaders = response.getHeaders();
		if (isExternalDescriptor(desc)) {
			updateVersion(desc, responseHeaders);
			updateDescriptor(desc, responseHeaders);
			copyDescriptor(desc, handleBase);
		} else {
			updateDescriptor(handleBase, responseHeaders);
		}

		Class as = handleBase.receiveAs();
		Object entity = response.getEntity(as);

		// keep the response open while the caller consumes a stream
		if (as != InputStream.class && as != Reader.class)
			response.close();

		handleBase.receiveContent(
				(reqlog != null) ? reqlog.copyContent(entity) : entity);

		return true;
	}

	/**
	 * Multipart read: metadata and content in one round trip. Expects exactly
	 * two body parts (metadata first, content second); returns false on 304
	 * or when the multipart body is absent/empty.
	 */
	private boolean getDocumentImpl(RequestLogger reqlog,
			DocumentDescriptor desc, String transactionId,
			Set<Metadata> categories, RequestParameters extraParams,
			String metadataFormat,
			DocumentMetadataReadHandle metadataHandle,
			AbstractReadHandle contentHandle)
			throws ResourceNotFoundException, ForbiddenUserException,
			FailedRequestException {
		String uri = desc.getUri();
		if (uri == null)
			throw new IllegalArgumentException(
					"Document read for document identifier without uri");

		if (logger.isInfoEnabled())
			logger.info("Getting multipart for {} in transaction {}", uri,
					transactionId);

		MultivaluedMap<String, String> docParams = makeDocumentParams(uri,
				categories, transactionId, extraParams, true);
		docParams.add("format", metadataFormat);

		WebResource.Builder builder = makeDocumentResource(docParams).accept(
				Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE));

		builder = addVersionHeader(desc, builder, "If-None-Match");

		ClientResponse response = builder.get(ClientResponse.class);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.NOT_FOUND)
			throw new ResourceNotFoundException(
					"Could not read non-existent document",
					extractErrorFields(response));
		if (status == ClientResponse.Status.FORBIDDEN)
			throw new ForbiddenUserException(
					"User is not allowed to read documents",
					extractErrorFields(response));
		if (status == ClientResponse.Status.NOT_MODIFIED) {
			response.close();
			return false;
		}
		if (status != ClientResponse.Status.OK)
			throw new FailedRequestException("read failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		logRequest(
				reqlog,
				"read %s document from %s transaction with %s metadata categories and content",
				uri, (transactionId != null) ? transactionId : "no",
				stringJoin(categories, ", ", "no"));

		MultiPart entity = response.getEntity(MultiPart.class);
		if (entity == null)
			return false;

		List<BodyPart> partList = entity.getBodyParts();
		if (partList == null)
			return false;

		int partCount = partList.size();
		if (partCount == 0)
			return false;
		if (partCount != 2)
			throw new FailedRequestException("read expected 2 parts but got "
					+ partCount + " parts");

		HandleImplementation metadataBase = HandleAccessor.as(metadataHandle);
		HandleImplementation contentBase = HandleAccessor.as(contentHandle);

		// part 0 is metadata, part 1 is the document content
		BodyPart contentPart = partList.get(1);

		MultivaluedMap<String, String> responseHeaders = response.getHeaders();
		MultivaluedMap<String, String> contentHeaders = contentPart.getHeaders();
		if (isExternalDescriptor(desc)) {
			updateVersion(desc, responseHeaders);
			updateFormat(desc, responseHeaders);
			updateMimetype(desc, contentHeaders);
			// length of the content part is not knowable from the headers here
			desc.setByteLength(ContentDescriptor.UNKNOWN_LENGTH);
			copyDescriptor(desc, contentBase);
		} else if (contentBase != null) {
			updateFormat(contentBase, responseHeaders);
			updateMimetype(contentBase, contentHeaders);
			contentBase.setByteLength(ContentDescriptor.UNKNOWN_LENGTH);
		}

		metadataBase.receiveContent(
				partList.get(0).getEntityAs(metadataBase.receiveAs())
				);

		Object contentEntity = contentPart.getEntityAs(
				contentBase.receiveAs());
		contentBase.receiveContent(
				(reqlog != null) ? reqlog.copyContent(contentEntity)
						: contentEntity);

		response.close();

		return true;
	}

	/**
	 * HEAD existence check for a document. Returns null for 404, otherwise a
	 * descriptor populated with version, format, mimetype, and length from
	 * the response headers.
	 * NOTE(review): unlike the other request methods, this one never clears
	 * isFirstRequest — confirm whether that is intentional.
	 */
	@Override
	public DocumentDescriptor head(RequestLogger reqlog, String uri,
			String transactionId) throws ForbiddenUserException,
			FailedRequestException {
		if (uri == null)
			throw new IllegalArgumentException(
					"Existence check for document identifier without uri");

		if (logger.isInfoEnabled())
			logger.info("Requesting head for {} in transaction {}", uri,
					transactionId);

		WebResource webResource = makeDocumentResource(makeDocumentParams(uri,
				null, transactionId, null));

		ClientResponse response = webResource.head();

		MultivaluedMap<String, String> responseHeaders = response.getHeaders();

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status != ClientResponse.Status.OK) {
			if (status == ClientResponse.Status.NOT_FOUND) {
				response.close();
				return null;
			} else if (status == ClientResponse.Status.FORBIDDEN)
				throw new ForbiddenUserException(
						"User is not allowed to check the existence of documents",
						extractErrorFields(response));
			else
				throw new FailedRequestException(
						"Document existence check failed: "
								+ status.getReasonPhrase(),
						extractErrorFields(response));
		}
		response.close();
		logRequest(reqlog, "checked %s document from %s transaction", uri,
				(transactionId != null) ? transactionId : "no");

		DocumentDescriptorImpl desc = new DocumentDescriptorImpl(uri, false);

		updateVersion(desc, responseHeaders);
		updateDescriptor(desc, responseHeaders);

		return desc;
	}

	/**
	 * Writes a document's metadata and/or content, picking the multipart or
	 * single-part implementation like getDocument. The content mimetype is
	 * taken from the descriptor when its format is known, otherwise derived
	 * from the handle's format.
	 */
	@Override
	public void putDocument(RequestLogger reqlog, DocumentDescriptor desc,
			String transactionId, Set<Metadata> categories,
			RequestParameters extraParams,
			DocumentMetadataWriteHandle metadataHandle,
			AbstractWriteHandle contentHandle)
			throws ResourceNotFoundException, ForbiddenUserException,
			FailedRequestException {
		HandleImplementation metadataBase = HandleAccessor.checkHandle(
				metadataHandle, "metadata");
		HandleImplementation contentBase = HandleAccessor.checkHandle(
				contentHandle, "content");

		String metadataMimetype = null;
		if (metadataBase != null) {
			metadataMimetype = metadataBase.getMimetype();
		}

		Format descFormat = desc.getFormat();
		String contentMimetype = (descFormat != null && descFormat != Format.UNKNOWN) ? desc
				.getMimetype() : null;
		if (contentMimetype == null && contentBase != null) {
			Format contentFormat = contentBase.getFormat();
			if (descFormat != null && descFormat != contentFormat) {
				// descriptor's format wins over the handle's
				contentMimetype = descFormat.getDefaultMimetype();
			} else if (contentFormat != null && contentFormat != Format.UNKNOWN) {
				contentMimetype = contentBase.getMimetype();
			}
		}

		if (metadataBase != null && contentBase != null) {
			putDocumentImpl(reqlog, desc, transactionId, categories,
					extraParams, metadataMimetype, metadataHandle,
					contentMimetype, contentHandle);
		} else if (metadataBase != null) {
			putDocumentImpl(reqlog, desc, transactionId, categories,
					extraParams, metadataMimetype, metadataHandle);
		} else if (contentBase != null) {
			putDocumentImpl(reqlog, desc, transactionId, null, extraParams,
					contentMimetype, contentHandle);
		}
	}

	/**
	 * Single-part document write. Streaming senders (OutputStreamSender,
	 * InputStream, Reader) trigger makeFirstRequest() first because their
	 * content can only be sent once. Maps 404/403/412 like deleteDocument.
	 */
	private void putDocumentImpl(RequestLogger reqlog, DocumentDescriptor desc,
			String transactionId, Set<Metadata> categories,
			RequestParameters extraParams, String mimetype,
			AbstractWriteHandle handle) throws ResourceNotFoundException,
			ForbiddenUserException, FailedRequestException {
		String uri = desc.getUri();
		if (uri == null)
			throw new IllegalArgumentException(
					"Document write for document identifier without uri");

		Object value = HandleAccessor.as(handle).sendContent();
		if (value == null)
			throw new IllegalArgumentException(
					"Document write with null value for " + uri);

		if (logger.isInfoEnabled())
			logger.info("Putting {} in transaction {}", uri, transactionId);

		logRequest(
				reqlog,
				"writing %s document from %s transaction with %s mime type and %s metadata categories",
				uri, (transactionId != null) ? transactionId : "no",
				(mimetype != null) ? mimetype : "no",
				stringJoin(categories, ", ", "no"));

		WebResource webResource = makeDocumentResource(makeDocumentParams(uri,
				categories, transactionId, extraParams));
		WebResource.Builder builder = webResource
				.type((mimetype != null) ? mimetype : MediaType.WILDCARD);

		builder = addVersionHeader(desc, builder, "If-Match");

		ClientResponse response = null;
		if (value instanceof OutputStreamSender) {
			if (isFirstRequest)
				makeFirstRequest();

			response = builder
					.put(ClientResponse.class, new StreamingOutputImpl(
							(OutputStreamSender) value, reqlog));

			if (isFirstRequest)
				isFirstRequest = false;
		} else {
			if (isFirstRequest
					&& (value instanceof InputStream || value instanceof Reader))
				makeFirstRequest();

			if (reqlog != null)
				response = builder.put(ClientResponse.class,
						reqlog.copyContent(value));
			else
				response = builder.put(ClientResponse.class, value);

			if (isFirstRequest)
				isFirstRequest = false;
		}

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.NOT_FOUND)
			throw new ResourceNotFoundException(
					"Could not write non-existent document",
					extractErrorFields(response));
		if (status == ClientResponse.Status.FORBIDDEN) {
			if (desc instanceof DocumentDescriptorImpl
					&& ((DocumentDescriptorImpl) desc).isInternal() == false
					&& desc.getVersion() == DocumentDescriptor.UNKNOWN_VERSION)
				throw new FailedRequestException("Content version required to write document",
						extractErrorFields(response));
			throw new ForbiddenUserException("User is not allowed to write documents",
					extractErrorFields(response));
		}
		if (status == ClientResponse.Status.PRECONDITION_FAILED)
			throw new FailedRequestException(
					"Content version must match to write document",
					extractErrorFields(response));
		if (status != ClientResponse.Status.CREATED
				&& status != ClientResponse.Status.NO_CONTENT)
			throw new FailedRequestException("write failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		response.close();
	}

	/**
	 * Multipart write: metadata (part 0) and content (part 1) in one PUT.
	 * Streaming parts trigger makeFirstRequest() before the request is sent.
	 */
	private void putDocumentImpl(RequestLogger reqlog, DocumentDescriptor desc,
			String transactionId, Set<Metadata> categories,
			RequestParameters extraParams, String metadataMimetype,
			DocumentMetadataWriteHandle metadataHandle,
			String contentMimetype, AbstractWriteHandle contentHandle)
			throws ResourceNotFoundException, ForbiddenUserException,
			FailedRequestException {
		String uri = desc.getUri();
		if (uri == null)
			throw new IllegalArgumentException(
					"Document write for document identifier without uri");

		if (logger.isInfoEnabled())
			logger.info("Putting multipart for {} in transaction {}", uri,
					transactionId);

		logRequest(
				reqlog,
				"writing %s document from %s transaction with %s metadata categories and content",
				uri, (transactionId != null) ? transactionId : "no",
				stringJoin(categories, ", ", "no"));

		boolean hasStreamingPart = false;

		MultiPart multiPart = new MultiPart();
		multiPart.setMediaType(new MediaType("multipart", "mixed"));
		// i == 0 is the metadata part, i == 1 is the content part
		for (int i = 0; i < 2; i++) {
			String mimetype = null;
			Object value = null;
			if (i == 0) {
				mimetype = metadataMimetype;
				value = HandleAccessor.as(metadataHandle).sendContent();
			} else {
				mimetype = contentMimetype;
				value = HandleAccessor.as(contentHandle).sendContent();
			}

			String[] typeParts = mimetype.contains("/") ? mimetype
					.split("/", 2) : null;

			MediaType typePart = (typeParts != null) ? new MediaType(
					typeParts[0], typeParts[1]) : MediaType.WILDCARD_TYPE;

			BodyPart bodyPart = null;
			if (value instanceof OutputStreamSender) {
				hasStreamingPart = true;
				bodyPart = new BodyPart(new StreamingOutputImpl(
						(OutputStreamSender) value, reqlog), typePart);
			} else {
				if (value instanceof InputStream || value instanceof Reader)
					hasStreamingPart = true;

				if (reqlog != null)
					bodyPart = new BodyPart(reqlog.copyContent(value),
							typePart);
				else
					bodyPart = new BodyPart(value, typePart);
			}

			multiPart = multiPart.bodyPart(bodyPart);
		}

		MultivaluedMap<String, String> docParams = makeDocumentParams(uri,
				categories, transactionId, extraParams, true);

		if (isFirstRequest && hasStreamingPart)
			makeFirstRequest();

		WebResource.Builder builder = makeDocumentResource(docParams).type(
				Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE)
				);

		builder = addVersionHeader(desc, builder, "If-Match");

		ClientResponse response = builder.put(ClientResponse.class, multiPart);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.NOT_FOUND) {
			response.close();
			throw new ResourceNotFoundException(
					"Could not write non-existent document");
		}
		if (status == ClientResponse.Status.FORBIDDEN) {
			// TODO: inspect response structure to distinguish from insufficient privilege
			if (desc instanceof DocumentDescriptorImpl
					&& ((DocumentDescriptorImpl) desc).isInternal() == false
					&& desc.getVersion() == DocumentDescriptor.UNKNOWN_VERSION)
				throw new FailedRequestException("Content version required to write document",
						extractErrorFields(response));
			throw new ForbiddenUserException("User is not allowed to write documents",
					extractErrorFields(response));
		}
		if (status == ClientResponse.Status.PRECONDITION_FAILED) {
			response.close();
			throw new FailedRequestException(
					"Content version must match to write document");
		}
		if (status != ClientResponse.Status.CREATED
				&& status != ClientResponse.Status.NO_CONTENT)
			throw new FailedRequestException("write failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		response.close();
	}

	/**
	 * Opens a server-side transaction and returns its id, parsed from the
	 * trailing segment of the Location header (server answers 303).
	 */
	@Override
	public String openTransaction(String name, int timeLimit)
			throws ForbiddenUserException, FailedRequestException {
		if (logger.isInfoEnabled())
			logger.info("Opening transaction");

		MultivaluedMap<String, String> transParams = null;
		if (name != null || timeLimit > 0) {
			transParams = new MultivaluedMapImpl();
			if (name != null)
				transParams.add("name", name);
			if (timeLimit > 0)
				transParams.add("timeLimit", String.valueOf(timeLimit));
		}

		WebResource resource = (transParams != null) ? connection.path(
				"transactions").queryParams(transParams) : connection
				.path("transactions");
		ClientResponse response = resource.post(ClientResponse.class);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN)
			throw new ForbiddenUserException(
					"User is not allowed to open transactions",
					extractErrorFields(response));
		if (status != ClientResponse.Status.SEE_OTHER)
			throw new FailedRequestException("transaction open failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));

		String location = response.getHeaders().getFirst("Location");
		response.close();
		if (location == null)
			throw new MarkLogicInternalException(
					"transaction open failed to provide location");
		if (!location.contains("/"))
			throw new MarkLogicInternalException(
					"transaction open produced invalid location " + location);

		return location.substring(location.lastIndexOf("/") + 1);
	}

	// Commits the transaction; delegates to completeTransaction.
	@Override
	public void commitTransaction(String transactionId)
			throws ForbiddenUserException, FailedRequestException {
		completeTransaction(transactionId, "commit");
	}

	// Rolls back the transaction; delegates to completeTransaction.
	@Override
	public void rollbackTransaction(String transactionId)
			throws ForbiddenUserException, FailedRequestException {
		completeTransaction(transactionId, "rollback");
	}

	// Posts result=commit|rollback to /transactions/{id}; expects 204.
	private void completeTransaction(String transactionId, String result)
			throws ForbiddenUserException, FailedRequestException {
		if (result == null)
			throw new MarkLogicInternalException(
					"transaction completion without operation");
		if (transactionId == null)
			throw new MarkLogicInternalException(
					"transaction completion without id: " + result);

		if (logger.isInfoEnabled())
			logger.info("Completing transaction {} with {}", transactionId,
					result);

		MultivaluedMap<String, String> transParams = new MultivaluedMapImpl();
		transParams.add("result", result);

		ClientResponse response = connection
				.path("transactions/" + transactionId).queryParams(transParams)
				.post(ClientResponse.class);

		if (isFirstRequest)
			isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN)
			throw new ForbiddenUserException(
					"User is not allowed to complete transaction with "
							+ result, extractErrorFields(response));
		if (status != ClientResponse.Status.NO_CONTENT)
			throw new FailedRequestException("transaction " + result
					+ " failed: " + status.getReasonPhrase(),
					extractErrorFields(response));

		response.close();
	}

	// Convenience overload: document params without forcing content category.
	private MultivaluedMap<String, String> makeDocumentParams(String uri,
			Set<Metadata> categories, String transactionId,
			RequestParameters extraParams) {
		return makeDocumentParams(uri, categories, transactionId, extraParams,
				false);
	}

	/**
	 * Builds the query parameters for /documents: extra params (minus the
	 * "range" header param), the uri, category selection, and txid.
	 * Metadata.ALL expands to the four concrete categories.
	 */
	private MultivaluedMap<String, String> makeDocumentParams(String uri,
			Set<Metadata> categories, String transactionId,
			RequestParameters extraParams, boolean withContent) {
		MultivaluedMap<String, String> docParams = new MultivaluedMapImpl();
		if (extraParams != null && extraParams.size() > 0) {
			for (Map.Entry<String, List<String>> entry : extraParams.entrySet()) {
				String extraKey = entry.getKey();
				// "range" is sent as an HTTP header, not a query param
				if (!"range".equalsIgnoreCase(extraKey))
					docParams.put(extraKey, entry.getValue());
			}
		}
		docParams.add("uri", uri);
		if (categories == null || categories.size() == 0) {
			docParams.add("category", "content");
		} else {
			if (withContent)
				docParams.add("category", "content");
			if (categories.contains(Metadata.ALL)) {
				for (String category : new String[] { "collections",
						"permissions", "properties", "quality" })
					docParams.add("category", category);
			} else {
				for (Metadata category : categories)
					docParams.add("category", category.name().toLowerCase());
			}
		}
		if (transactionId != null)
			docParams.add("txid", transactionId);
		return docParams;
	}

	// Root resource for document operations with the given query params.
	private WebResource makeDocumentResource(
			MultivaluedMap<String, String> queryParams) {
		return connection.path("documents").queryParams(queryParams);
	}

	// True when the descriptor was supplied by the caller (not created
	// internally), i.e. its fields should be populated from responses.
	private boolean isExternalDescriptor(ContentDescriptor desc) {
		return desc != null && desc instanceof DocumentDescriptorImpl
				&& !((DocumentDescriptorImpl) desc).isInternal();
	}

	// Copies format, mimetype, and length from response headers to the
	// descriptor; no-op when either argument is null.
	private void updateDescriptor(ContentDescriptor desc,
			MultivaluedMap<String, String> headers) {
		if (desc == null || headers == null)
			return;

		updateFormat(desc, headers);
		updateMimetype(desc, headers);
		updateLength(desc, headers);
	}

	// Mirrors a descriptor's content fields onto a handle.
	private void copyDescriptor(DocumentDescriptor desc,
			HandleImplementation handleBase) {
		if (handleBase == null)
			return;

		handleBase.setFormat(desc.getFormat());
		handleBase.setMimetype(desc.getMimetype());
		handleBase.setByteLength(desc.getByteLength());
	}

	// Reads the server's vnd.marklogic.document-format header into the
	// descriptor's Format, when present.
	private void updateFormat(ContentDescriptor descriptor,
			MultivaluedMap<String, String> headers) {
		if (headers.containsKey("vnd.marklogic.document-format")) {
			List<String> values = headers.get("vnd.marklogic.document-format");
			if (values != null) {
				Format format = Format.valueOf(values.get(0).toUpperCase());
				if (format != null) {
					descriptor.setFormat(format);
				}
			}
		}
	}

	// Extracts the bare mimetype from Content-Type (drops any ";charset=").
	private void updateMimetype(ContentDescriptor descriptor,
			MultivaluedMap<String, String> headers) {
		if (headers.containsKey("Content-Type")) {
			List<String> values = headers.get("Content-Type");
			if (values != null) {
				String contentType = values.get(0);
				String mimetype = contentType.contains(";") ? contentType
						.substring(0, contentType.indexOf(";")) : contentType;
				// TODO: if "; charset=foo" set character set
				if (mimetype != null && mimetype.length() > 0) {
					descriptor.setMimetype(mimetype);
				}
			}
		}
	}

	// Sets byte length from Content-Length, or UNKNOWN_LENGTH when absent.
	private void updateLength(ContentDescriptor descriptor,
			MultivaluedMap<String, String> headers) {
		long length = ContentDescriptor.UNKNOWN_LENGTH;
		if (headers.containsKey("Content-Length")) {
			List<String> values = headers.get("Content-Length");
			if (values != null) {
				length = Long.valueOf(values.get(0));
			}
		}
		descriptor.setByteLength(length);
	}

	// Sets the document version from the ETag header, or UNKNOWN_VERSION.
	// NOTE(review): parses the ETag value directly as a long — assumes the
	// server sends unquoted numeric ETags; confirm against the server.
	private void updateVersion(DocumentDescriptor descriptor,
			MultivaluedMap<String, String> headers) {
		long version = DocumentDescriptor.UNKNOWN_VERSION;
		if (headers.containsKey("ETag")) {
			List<String> values = headers.get("ETag");
			if (values != null) {
				version = Long.valueOf(values.get(0));
			}
		}
		descriptor.setVersion(version);
	}

	// Adds If-Match / If-None-Match for external descriptors carrying a
	// known version; otherwise returns the builder unchanged.
	private WebResource.Builder addVersionHeader(DocumentDescriptor desc,
			WebResource.Builder builder, String name) {
		if (desc != null && desc instanceof DocumentDescriptorImpl
				&& !((DocumentDescriptorImpl) desc).isInternal()) {
			long version = desc.getVersion();
			if (version != DocumentDescriptor.UNKNOWN_VERSION) {
				return builder.header(name, String.valueOf(version));
			}
		}
		return builder;
	}

	/**
	 * Runs a search and returns the response entity as the requested class.
	 * Dispatches on the query definition type: string queries and key/value
	 * queries GET /search or /keyvalue; structured queries POST their XML
	 * serialization to /search; delete-style definitions GET /search.
	 * The response is left open when the caller asked for a stream.
	 */
	@Override
	public <T> T search(Class<T> as, QueryDefinition queryDef,
			String mimetype, long start, long len,
			QueryManager.ResponseViews views, String transactionId)
			throws ForbiddenUserException, FailedRequestException {
		RequestParameters params = new RequestParameters();
		ClientResponse response = null;

		if (start > 1) {
			params.put("start", "" + start);
		}

		if (len > 0) {
			params.put("pageLength", "" + len);
		}

		for (QueryManager.QueryView view : views) {
			if (view == QueryManager.QueryView.SEARCH) {
				params.put("view", "search");
			} else if (view == QueryManager.QueryView.FACETS) {
				params.put("view", "facets");
			} else if (view == QueryManager.QueryView.METRICS) {
				params.put("view", "metrics");
			}
		}

		if (queryDef.getDirectory() != null) {
			params.put("directory", queryDef.getDirectory());
		}

		for (String collection : queryDef.getCollections()) {
			params.put("collection", collection);
		}

		if (transactionId != null) {
			params.put("txid", transactionId);
		}

		String optionsName = queryDef.getOptionsName();
		if (optionsName != null && optionsName.length() > 0) {
			params.put("options", optionsName);
		}

		if (queryDef instanceof StringQueryDefinition) {
			String text = ((StringQueryDefinition) queryDef).getCriteria();
			if (logger.isInfoEnabled())
				logger.info("Searching for {} in transaction {}", text,
						transactionId);

			if (text != null) {
				params.put("q", text);
			}

			response = connection.path("search")
					.queryParams(((RequestParametersImplementation) params).getMapImpl())
					.accept(mimetype).get(ClientResponse.class);

			if (isFirstRequest)
				isFirstRequest = false;
		} else if (queryDef instanceof KeyValueQueryDefinition) {
			Map<ValueLocator, String> pairs = ((KeyValueQueryDefinition) queryDef);
			if (logger.isInfoEnabled())
				logger.info("Searching for keys/values in transaction {}",
						transactionId);

			for (ValueLocator loc : pairs.keySet()) {
				if (loc instanceof KeyLocator) {
					params.put("key", ((KeyLocator) loc).getKey());
				} else {
					ElementLocator eloc = (ElementLocator) loc;
					params.put("element", eloc.getElement().toString());
					if (eloc.getAttribute() != null) {
						params.put("attribute", eloc.getAttribute().toString());
					}
				}
				params.put("value", pairs.get(loc));
			}

			response = connection.path("keyvalue")
					.queryParams(((RequestParametersImplementation) params).getMapImpl())
					.accept(mimetype).get(ClientResponse.class);

			if (isFirstRequest)
				isFirstRequest = false;
		} else if (queryDef instanceof StructuredQueryDefinition) {
			String structure = ((StructuredQueryDefinition) queryDef)
					.serialize();

			response = connection.path("search")
					.queryParams(((RequestParametersImplementation) params).getMapImpl())
					.type("application/xml")
					.post(ClientResponse.class, structure);
			isFirstRequest = false;
		} else if (queryDef instanceof DeleteQueryDefinition) {
			if (logger.isInfoEnabled())
				logger.info("Searching for deletes in transaction {}",
						transactionId);

			response = connection.path("search")
					.queryParams(((RequestParametersImplementation) params).getMapImpl())
					.accept(mimetype).get(ClientResponse.class);
			isFirstRequest = false;
		} else {
			throw new UnsupportedOperationException("Cannot search with "
					+ queryDef.getClass().getName());
		}

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN) {
			throw new ForbiddenUserException("User is not allowed to search",
					extractErrorFields(response));
		}
		if (status != ClientResponse.Status.OK) {
			throw new FailedRequestException("search failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
		}

		T entity = response.getEntity(as);
		if (as != InputStream.class && as != Reader.class)
			response.close();
		return entity;
	}

	/**
	 * Bulk delete by directory/collection scope via DELETE /search;
	 * expects 204 on success.
	 */
	@Override
	public void deleteSearch(DeleteQueryDefinition queryDef,
			String transactionId) throws ForbiddenUserException,
			FailedRequestException {
		RequestParameters params = new RequestParameters();
		ClientResponse response = null;

		if (queryDef.getDirectory() != null) {
			params.put("directory", queryDef.getDirectory());
		}

		for (String collection : queryDef.getCollections()) {
			params.put("collection", collection);
		}

		if (transactionId != null) {
			params.put("txid", transactionId);
		}

		response = connection.path("search")
				.queryParams(((RequestParametersImplementation) params).getMapImpl())
				.delete(ClientResponse.class);
		isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN) {
			throw new ForbiddenUserException("User is not allowed to delete",
					extractErrorFields(response));
		}

		if (status != ClientResponse.Status.NO_CONTENT) {
			throw new FailedRequestException("delete failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
		}
	}

	/**
	 * Retrieves lexicon values (GET /values or /values/{name}), translating
	 * the definition's aggregate/view/direction/frequency settings into
	 * query parameters. Response left open for stream consumers.
	 */
	@Override
	public <T> T values(Class<T> as, ValuesDefinition valDef, String mimetype,
			String transactionId) throws ForbiddenUserException,
			FailedRequestException {
		MultivaluedMap<String, String> docParams = new MultivaluedMapImpl();
		ClientResponse response = null;

		String optionsName = valDef.getOptionsName();
		if (optionsName != null && optionsName.length() > 0) {
			docParams.add("options", optionsName);
		}

		if (valDef.getAggregate() != null) {
			docParams.add("aggregate", valDef.getAggregate());
		}

		if (valDef.getAggregatePath() != null) {
			docParams.add("aggregatePath", valDef.getAggregatePath());
		}

		if (valDef.getView() != null) {
			docParams.add("view", valDef.getView());
		}

		if (valDef.getDirection() != null) {
			if (valDef.getDirection() == ValuesDefinition.Direction.ASCENDING) {
				docParams.add("direction", "ascending");
			} else {
				docParams.add("direction", "descending");
			}
		}

		if (valDef.getFrequency() != null) {
			if (valDef.getFrequency() == ValuesDefinition.Frequency.FRAGMENT) {
				docParams.add("frequency", "fragment");
			} else {
				docParams.add("frequency", "item");
			}
		}

		if (transactionId != null) {
			docParams.add("txid", transactionId);
		}

		String uri = "values";
		if (valDef.getName() != null) {
			uri += "/" + valDef.getName();
		}

		response = connection.path(uri).queryParams(docParams).accept(mimetype)
				.get(ClientResponse.class);
		isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN) {
			throw new ForbiddenUserException("User is not allowed to search",
					extractErrorFields(response));
		}

		if (status != ClientResponse.Status.OK) {
			throw new FailedRequestException("search failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
		}

		T entity = response.getEntity(as);
		if (as != InputStream.class && as != Reader.class)
			response.close();
		return entity;
	}

	/**
	 * Lists the available lexicons (GET /values).
	 */
	@Override
	public <T> T valuesList(Class<T> as, ValuesListDefinition valDef,
			String mimetype, String transactionId)
			throws ForbiddenUserException, FailedRequestException {
		MultivaluedMap<String, String> docParams = new MultivaluedMapImpl();
		ClientResponse response = null;

		String optionsName = valDef.getOptionsName();
		if (optionsName != null && optionsName.length() > 0) {
			docParams.add("options", optionsName);
		}

		if (transactionId != null) {
			docParams.add("txid", transactionId);
		}

		String uri = "values";

		response = connection.path(uri).queryParams(docParams).accept(mimetype)
				.get(ClientResponse.class);
		isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN) {
			throw new ForbiddenUserException("User is not allowed to search",
					extractErrorFields(response));
		}

		if (status != ClientResponse.Status.OK) {
			throw new FailedRequestException("search failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
		}

		T entity = response.getEntity(as);
		if (as != InputStream.class && as != Reader.class)
			response.close();
		return entity;
	}

	/**
	 * Lists named query options (GET /config/query).
	 */
	@Override
	public <T> T optionsList(Class<T> as, String mimetype, String transactionId)
			throws ForbiddenUserException, FailedRequestException {
		MultivaluedMap<String, String> docParams = new MultivaluedMapImpl();
		ClientResponse response = null;

		if (transactionId != null) {
			docParams.add("txid", transactionId);
		}

		String uri = "config/query";

		response = connection.path(uri).queryParams(docParams).accept(mimetype)
				.get(ClientResponse.class);
		isFirstRequest = false;

		ClientResponse.Status status = response.getClientResponseStatus();
		if (status == ClientResponse.Status.FORBIDDEN) {
			throw new ForbiddenUserException("User is not allowed to search",
					extractErrorFields(response));
		}

		if (status != ClientResponse.Status.OK) {
			throw new FailedRequestException("search failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
		}

		T entity = response.getEntity(as);
		if (as != InputStream.class && as != Reader.class)
			response.close();
		return entity;
	}

	// namespaces, search options etc.
// NOTE(review): this region was machine-collapsed onto single lines; only
// whitespace and comments were changed here — every code token is unchanged.

/**
 * Reads the value stored under {@code type}/{@code key} from the REST
 * service and returns it as an instance of {@code as}.
 *
 * @param reqlog   optional request logger; when non-null a copy of the
 *                 returned content is routed through it
 * @param type     URI segment naming the value collection
 * @param key      key of the individual value under {@code type}
 * @param mimetype MIME type sent in the Accept header (may be null)
 * @param as       class to map the response entity to
 * @return the response entity, or null when the server answers 404
 * @throws ForbiddenUserException when the server answers 403
 * @throws FailedRequestException for any other non-200 status
 */
@Override
public <T> T getValue(RequestLogger reqlog, String type, String key,
		String mimetype, Class<T> as) throws ForbiddenUserException,
		FailedRequestException {
	if (logger.isInfoEnabled())
		logger.info("Getting {}/{}", type, key);

	ClientResponse response = connection.path(type + "/" + key)
			.accept(mimetype).get(ClientResponse.class);
	if (isFirstRequest)
		isFirstRequest = false;

	ClientResponse.Status status = response.getClientResponseStatus();
	if (status != ClientResponse.Status.OK) {
		if (status == ClientResponse.Status.NOT_FOUND) {
			// a missing value is reported as null rather than an exception
			response.close();
			return null;
		} else if (status == ClientResponse.Status.FORBIDDEN)
			throw new ForbiddenUserException("User is not allowed to read "
					+ type, extractErrorFields(response));
		else
			throw new FailedRequestException(type + " read failed: "
					+ status.getReasonPhrase(), extractErrorFields(response));
	}

	// NOTE(review): the ternary below is a no-op — (mimetype != null) ?
	// mimetype : null always evaluates to mimetype; presumably a default
	// label was intended for the null case
	logRequest(reqlog, "read %s value with %s key and %s mime type", type,
			key, (mimetype != null) ? mimetype : null);

	T entity = response.getEntity(as);
	// streaming results are handed to the caller unconsumed, so the
	// response must stay open for them
	if (as != InputStream.class && as != Reader.class)
		response.close();

	return (reqlog != null) ? reqlog.copyContent(entity) : entity;
}

/**
 * Reads all values under {@code type}.  Same contract as
 * {@link #getValue(RequestLogger, String, String, String, Class)} but
 * without a key; note that 404 is NOT special-cased here and surfaces as
 * a FailedRequestException.
 *
 * @param reqlog   optional request logger
 * @param type     URI segment naming the value collection
 * @param mimetype MIME type sent in the Accept header (may be null)
 * @param as       class to map the response entity to
 * @throws ForbiddenUserException when the server answers 403
 * @throws FailedRequestException for any other non-200 status
 */
@Override
public <T> T getValues(RequestLogger reqlog, String type, String mimetype,
		Class<T> as) throws ForbiddenUserException, FailedRequestException {
	if (logger.isInfoEnabled())
		logger.info("Getting {}", type);

	ClientResponse response = connection.path(type).accept(mimetype)
			.get(ClientResponse.class);
	if (isFirstRequest)
		isFirstRequest = false;

	ClientResponse.Status status = response.getClientResponseStatus();
	if (status == ClientResponse.Status.FORBIDDEN) {
		throw new ForbiddenUserException("User is not allowed to read "
				+ type, extractErrorFields(response));
	}
	if (status != ClientResponse.Status.OK) {
		throw new FailedRequestException(type + " read failed: "
				+ status.getReasonPhrase(), extractErrorFields(response));
	}

	logRequest(reqlog, "read %s values with %s mime type", type,
			(mimetype != null) ?
mimetype : null); T entity = response.getEntity(as); if (as != InputStream.class && as != Reader.class) response.close(); return (reqlog != null) ? reqlog.copyContent(entity) : entity; } @Override public void putValues(RequestLogger reqlog, String type, String mimetype, Object value) throws ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Posting {}", type); putPostValueImpl(reqlog, "put", type, null, null, mimetype, value, ClientResponse.Status.NO_CONTENT); } @Override public void postValues(RequestLogger reqlog, String type, String mimetype, Object value) throws ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Posting {}", type); putPostValueImpl(reqlog, "post", type, null, null, mimetype, value, ClientResponse.Status.NO_CONTENT); } @Override public void postValue(RequestLogger reqlog, String type, String key, String mimetype, Object value) throws ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Posting {}/{}", type, key); putPostValueImpl(reqlog, "post", type, key, null, mimetype, value, ClientResponse.Status.CREATED); } @Override public void putValue(RequestLogger reqlog, String type, String key, String mimetype, Object value) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Putting {}/{}", type, key); putPostValueImpl(reqlog, "put", type, key, null, mimetype, value, ClientResponse.Status.NO_CONTENT, ClientResponse.Status.CREATED); } @Override public void putValue(RequestLogger reqlog, String type, String key, RequestParameters extraParams, String mimetype, Object value) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Putting {}/{}", type, key); putPostValueImpl(reqlog, "put", type, key, extraParams, mimetype, value, ClientResponse.Status.NO_CONTENT); } private void 
putPostValueImpl(RequestLogger reqlog, String method, String type, String key, RequestParameters extraParams, String mimetype, Object value, ClientResponse.Status... expectedStatuses) { if (key != null) { logRequest(reqlog, "writing %s value with %s key and %s mime type", type, key, (mimetype != null) ? mimetype : null); } else { logRequest(reqlog, "writing %s values with %s mime type", type, (mimetype != null) ? mimetype : null); } boolean hasStreamingPart = false; Object sentValue = null; if (value instanceof OutputStreamSender) { hasStreamingPart = true; sentValue = new StreamingOutputImpl((OutputStreamSender) value, reqlog); } else { if (value instanceof InputStream || value instanceof Reader) hasStreamingPart = true; if (reqlog != null) sentValue = reqlog.copyContent(value); else sentValue = value; } MultivaluedMap<String, String> requestParams = convertParams(extraParams); ClientResponse response = null; if ("put".equals(method)) { if (isFirstRequest && hasStreamingPart) makeFirstRequest(); String connectPath = (key != null) ? type + "/" + key : type; WebResource resource = (requestParams == null) ? connection .path(connectPath) : connection.path(connectPath) .queryParams(requestParams); response = resource.type(mimetype).put(ClientResponse.class, sentValue); if (isFirstRequest) isFirstRequest = false; } else if ("post".equals(method)) { if (isFirstRequest && hasStreamingPart) makeFirstRequest(); WebResource resource = (requestParams == null) ? 
connection .path(type) : connection.path(type).queryParams( requestParams); response = resource.type(mimetype).post(ClientResponse.class, sentValue); if (isFirstRequest) isFirstRequest = false; } else { throw new MarkLogicInternalException("unknown method type " + method); } ClientResponse.Status status = response.getClientResponseStatus(); if (status == ClientResponse.Status.FORBIDDEN) throw new ForbiddenUserException("User is not allowed to write " + type, extractErrorFields(response)); if (status == ClientResponse.Status.NOT_FOUND) throw new ResourceNotFoundException(type + " not found for write", extractErrorFields(response)); boolean statusOk = false; for (ClientResponse.Status expectedStatus : expectedStatuses) { statusOk = statusOk || (status == expectedStatus); if (statusOk) { break; } } if (!statusOk) { throw new FailedRequestException(type + " write failed: " + status.getReasonPhrase(), extractErrorFields(response)); } response.close(); } @Override public void deleteValue(RequestLogger reqlog, String type, String key) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Deleting {}/{}", type, key); ClientResponse response = connection.path(type + "/" + key).delete( ClientResponse.class); if (isFirstRequest) isFirstRequest = false; ClientResponse.Status status = response.getClientResponseStatus(); if (status == ClientResponse.Status.FORBIDDEN) throw new ForbiddenUserException("User is not allowed to delete " + type, extractErrorFields(response)); if (status == ClientResponse.Status.NOT_FOUND) throw new ResourceNotFoundException(type + " not found for delete", extractErrorFields(response)); if (status != ClientResponse.Status.NO_CONTENT) throw new FailedRequestException("delete failed: " + status.getReasonPhrase(), extractErrorFields(response)); response.close(); logRequest(reqlog, "deleted %s value with %s key", type, key); } @Override public void deleteValues(RequestLogger reqlog, 
String type) throws ForbiddenUserException, FailedRequestException { if (logger.isInfoEnabled()) logger.info("Deleting {}", type); ClientResponse response = connection.path(type).delete( ClientResponse.class); if (isFirstRequest) isFirstRequest = false; ClientResponse.Status status = response.getClientResponseStatus(); if (status == ClientResponse.Status.FORBIDDEN) throw new ForbiddenUserException("User is not allowed to delete " + type, extractErrorFields(response)); if (status != ClientResponse.Status.NO_CONTENT) throw new FailedRequestException("delete failed: " + status.getReasonPhrase(), extractErrorFields(response)); response.close(); logRequest(reqlog, "deleted %s values", type); } @Override public <T> T getResource(RequestLogger reqlog, String path, RequestParameters params, String mimetype, Class<T> as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doGet(path, params, mimetype); checkStatus(response, "read", "resource", path, (as != null) ? ResponseStatus.OK : ResponseStatus.NO_CONTENT); return makeResult(reqlog, "read", "resource", response, as); } @Override public Object[] getResource(RequestLogger reqlog, String path, RequestParameters params, String[] mimetypes, Class[] as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doGet(path, params, Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE)); checkStatus(response, "read", "resource", path, (as != null && as.length > 0) ? 
ResponseStatus.OK : ResponseStatus.NO_CONTENT); return makeResults(reqlog, "read", "resource", response, as); } @Override public <T> T putResource(RequestLogger reqlog, String path, RequestParameters params, String inputMimetype, Object value, String outputMimetype, Class<T> as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPut(reqlog, path, params, inputMimetype, value, outputMimetype); checkStatus(response, "write", "resource", path, (as != null) ? ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResult(reqlog, "write", "resource", response, as); } @Override public <T> T putResource(RequestLogger reqlog, String path, RequestParameters params, String[] inputMimetypes, Object[] values, String outputMimetype, Class<T> as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPut(reqlog, path, params, inputMimetypes, values, outputMimetype); checkStatus(response, "write", "resource", path, (as != null) ? ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResult(reqlog, "write", "resource", response, as); } @Override public Object postResource(RequestLogger reqlog, String path, RequestParameters params, String inputMimetype, Object value, String outputMimetype, Class as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPost(reqlog, path, params, inputMimetype, value, outputMimetype); checkStatus(response, "apply", "resource", path, (as != null) ? 
ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResult(reqlog, "apply", "resource", response, as); } @Override public Object postResource(RequestLogger reqlog, String path, RequestParameters params, String[] inputMimetypes, Object[] values, String outputMimetype, Class as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPost(reqlog, path, params, inputMimetypes, values, outputMimetype); checkStatus(response, "apply", "resource", path, (as != null) ? ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResult(reqlog, "apply", "resource", response, as); } @Override public Object[] postResource(RequestLogger reqlog, String path, RequestParameters params, String inputMimetype, Object value, String[] outputMimetypes, Class[] as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPost(reqlog, path, params, inputMimetype, value, Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE)); checkStatus(response, "apply", "resource", path, (as != null && as.length > 0) ? ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResults(reqlog, "apply", "resource", response, as); } @Override public Object[] postResource(RequestLogger reqlog, String path, RequestParameters params, String[] inputMimetypes, Object[] values, String[] outputMimetypes, Class[] as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doPost(reqlog, path, params, inputMimetypes, values, Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE)); checkStatus(response, "apply", "resource", path, (as != null && as.length > 0) ? 
ResponseStatus.OK : ResponseStatus.CREATED_OR_NO_CONTENT); return makeResults(reqlog, "apply", "resource", response, as); } @Override public <T> T deleteResource(RequestLogger reqlog, String path, RequestParameters params, String outputMimetype, Class<T> as) throws ResourceNotFoundException, ForbiddenUserException, FailedRequestException { ClientResponse response = doDelete(reqlog, path, params, outputMimetype); checkStatus(response, "delete", "resource", path, (as != null) ? ResponseStatus.OK : ResponseStatus.NO_CONTENT); return makeResult(reqlog, "delete", "resource", response, as); } private ClientResponse doGet(String path, RequestParameters params, Object mimetype) { if (path == null) throw new IllegalArgumentException("Read with null path"); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), null, mimetype); if (logger.isInfoEnabled()) logger.info("Getting {}", path); ClientResponse response = builder.get(ClientResponse.class); if (isFirstRequest) isFirstRequest = false; return response; } private ClientResponse doPut(RequestLogger reqlog, String path, RequestParameters params, Object inputMimetype, Object value, String outputMimetype) { if (path == null) throw new IllegalArgumentException("Write with null path"); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), inputMimetype, outputMimetype); if (logger.isInfoEnabled()) logger.info("Putting {}", path); ClientResponse response = null; if (value instanceof OutputStreamSender) { if (isFirstRequest) makeFirstRequest(); response = builder .put(ClientResponse.class, new StreamingOutputImpl( (OutputStreamSender) value, reqlog)); } else { if (isFirstRequest && (value instanceof InputStream || value instanceof Reader)) makeFirstRequest(); if (reqlog != null) response = builder.put(ClientResponse.class, reqlog.copyContent(value)); else response = builder.put(ClientResponse.class, value); } if (isFirstRequest) 
isFirstRequest = false; return response; } private ClientResponse doPut(RequestLogger reqlog, String path, RequestParameters params, String[] inputMimetypes, Object[] values, String outputMimetype) { if (path == null) throw new IllegalArgumentException("Write with null path"); MultiPart multiPart = new MultiPart(); boolean hasStreamingPart = addParts(reqlog, multiPart, inputMimetypes, values); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE), outputMimetype); if (logger.isInfoEnabled()) logger.info("Putting multipart for {}", path); if (isFirstRequest && hasStreamingPart) makeFirstRequest(); ClientResponse response = builder.put(ClientResponse.class, multiPart); if (isFirstRequest) isFirstRequest = false; return response; } private ClientResponse doPost(RequestLogger reqlog, String path, RequestParameters params, Object inputMimetype, Object value, Object outputMimetype) { if (path == null) throw new IllegalArgumentException("Apply with null path"); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), inputMimetype, outputMimetype); if (logger.isInfoEnabled()) logger.info("Posting {}", path); ClientResponse response = null; if (value instanceof OutputStreamSender) { if (isFirstRequest) makeFirstRequest(); response = builder .post(ClientResponse.class, new StreamingOutputImpl( (OutputStreamSender) value, reqlog)); } else { if (isFirstRequest && (value instanceof InputStream || value instanceof Reader)) makeFirstRequest(); if (reqlog != null) response = builder.post(ClientResponse.class, reqlog.copyContent(value)); else response = builder.post(ClientResponse.class, value); } if (isFirstRequest) isFirstRequest = false; return response; } private ClientResponse doPost(RequestLogger reqlog, String path, RequestParameters params, String[] inputMimetypes, Object[] values, Object outputMimetype) { if 
(path == null) throw new IllegalArgumentException("Apply with null path"); MultiPart multiPart = new MultiPart(); boolean hasStreamingPart = addParts(reqlog, multiPart, inputMimetypes, values); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), Boundary.addBoundary(MultiPartMediaTypes.MULTIPART_MIXED_TYPE), outputMimetype); if (logger.isInfoEnabled()) logger.info("Posting multipart for {}", path); if (isFirstRequest && hasStreamingPart) makeFirstRequest(); ClientResponse response = builder.post(ClientResponse.class, multiPart); if (isFirstRequest) isFirstRequest = false; return response; } private ClientResponse doDelete(RequestLogger reqlog, String path, RequestParameters params, String mimetype) { if (path == null) throw new IllegalArgumentException("Delete with null path"); WebResource.Builder builder = makeBuilder(path, ((RequestParametersImplementation) params).getMapImpl(), null, mimetype); if (logger.isInfoEnabled()) logger.info("Deleting {}", path); ClientResponse response = builder.delete(ClientResponse.class); if (isFirstRequest) isFirstRequest = false; return response; } private MultivaluedMap<String, String> convertParams( RequestParameters params) { if (params == null || params.size() == 0) return null; MultivaluedMap<String, String> requestParams = new MultivaluedMapImpl(); requestParams.putAll(params); return requestParams; } private boolean addParts(RequestLogger reqlog, MultiPart multiPart, String[] mimetypes, Object[] values) { if (mimetypes == null || mimetypes.length == 0) throw new IllegalArgumentException( "mime types not specified for multipart"); if (values == null || values.length == 0) throw new IllegalArgumentException( "values not specified for multipart"); if (mimetypes.length != values.length) throw new IllegalArgumentException( "mistmatch between mime types and values for multipart"); multiPart.setMediaType(new MediaType("multipart", "mixed")); boolean hasStreamingPart = false; for 
(int i = 0; i < mimetypes.length; i++) { if (mimetypes[i] == null) throw new IllegalArgumentException("null mimetype: " + i); String[] typeParts = mimetypes[i].contains("/") ? mimetypes[i] .split("/", 2) : null; MediaType typePart = (typeParts != null) ? new MediaType( typeParts[0], typeParts[1]) : MediaType.WILDCARD_TYPE; BodyPart bodyPart = null; if (values[i] instanceof OutputStreamSender) { hasStreamingPart = true; bodyPart = new BodyPart(new StreamingOutputImpl( (OutputStreamSender) values[i], reqlog), typePart); } else { if (values[i] instanceof InputStream || values[i] instanceof Reader) hasStreamingPart = true; if (reqlog != null) bodyPart = new BodyPart(reqlog.copyContent(values[i]), typePart); else bodyPart = new BodyPart(values[i], typePart); } multiPart = multiPart.bodyPart(bodyPart); } return hasStreamingPart; } private WebResource.Builder makeBuilder(String path, MultivaluedMap<String, String> params, Object inputMimetype, Object outputMimetype) { WebResource resource = (params == null) ? 
connection.path(path) : connection.path(path).queryParams(params); WebResource.Builder builder = resource.getRequestBuilder(); if (inputMimetype == null) { } else if (inputMimetype instanceof String) { builder = builder.type((String) inputMimetype); } else if (inputMimetype instanceof MediaType) { builder = builder.type((MediaType) inputMimetype); } else { throw new IllegalArgumentException( "Unknown input mimetype specifier " + inputMimetype.getClass().getName()); } if (outputMimetype == null) { } else if (outputMimetype instanceof String) { builder = builder.accept((String) outputMimetype); } else if (outputMimetype instanceof MediaType) { builder = builder.accept((MediaType) outputMimetype); } else { throw new IllegalArgumentException( "Unknown output mimetype specifier " + outputMimetype.getClass().getName()); } return builder; } private void checkStatus(ClientResponse response, String operation, String entityType, String path, ResponseStatus expected) { ClientResponse.Status status = response.getClientResponseStatus(); if (!expected.isExpected(status)) { response.close(); if (status == ClientResponse.Status.NOT_FOUND) { throw new ResourceNotFoundException("Could not " + operation + " " + entityType + " at " + path, extractErrorFields(response)); } if (status == ClientResponse.Status.FORBIDDEN) { throw new ForbiddenUserException("User is not allowed to " + operation + " " + entityType + " at " + path, extractErrorFields(response)); } throw new FailedRequestException("failed to " + operation + " " + entityType + " at " + path + ": " + status.getReasonPhrase(), extractErrorFields(response)); } } private <T> T makeResult(RequestLogger reqlog, String operation, String entityType, ClientResponse response, Class<T> as) { if (as == null) return null; logRequest(reqlog, "%s for %s", operation, entityType); T entity = response.getEntity(as); if (as != InputStream.class && as != Reader.class) response.close(); return (reqlog != null) ? 
reqlog.copyContent(entity) : entity; } private Object[] makeResults(RequestLogger reqlog, String operation, String entityType, ClientResponse response, Class[] as) { if (as == null || as.length == 0) return null; logRequest(reqlog, "%s for %s", operation, entityType); MultiPart entity = response.getEntity(MultiPart.class); if (entity == null) return null; List<BodyPart> partList = entity.getBodyParts(); if (partList == null) return null; int partCount = partList.size(); if (partCount == 0) return null; if (partCount != as.length) throw new FailedRequestException("read expected " + as.length + " parts but got " + partCount + " parts"); Object[] parts = new Object[partCount]; for (int i = 0; i < partCount; i++) { Object part = partList.get(i).getEntityAs(as[i]); parts[i] = (reqlog != null) ? reqlog.copyContent(part) : part; } response.close(); return parts; } private void logRequest(RequestLogger reqlog, String message, Object... params) { if (reqlog == null) return; PrintStream out = reqlog.getPrintStream(); if (out == null) return; if (params == null || params.length == 0) { out.println(message); } else { out.format(message, params); out.println(); } } private String stringJoin(Collection collection, String separator, String defaultValue) { if (collection == null || collection.size() == 0) return defaultValue; StringBuilder builder = null; for (Object value : collection) { if (builder == null) builder = new StringBuilder(); else builder.append(separator); builder.append(value); } return (builder != null) ? builder.toString() : null; } // backdoors for testing public Client getClient() { return client; } public WebResource getConnection() { return connection; } }
Bug:16852 missing guard for null pointer git-svn-id: 2087087167f935058d19856144d26af79f295c86@108063 62cac252-8da6-4816-9e9d-6dc37b19578c
src/main/java/com/marklogic/client/impl/JerseyServices.java
Bug:16852 missing guard for null pointer
<ide><path>rc/main/java/com/marklogic/client/impl/JerseyServices.java <ide> value = HandleAccessor.as(contentHandle).sendContent(); <ide> } <ide> <del> String[] typeParts = mimetype.contains("/") ? mimetype <del> .split("/", 2) : null; <del> <del> MediaType typePart = (typeParts != null) ? new MediaType( <del> typeParts[0], typeParts[1]) : MediaType.WILDCARD_TYPE; <add> String[] typeParts = (mimetype != null && mimetype.contains("/")) ? <add> mimetype.split("/", 2) : null; <add> <add> MediaType typePart = (typeParts != null) ? <add> new MediaType(typeParts[0], typeParts[1]) : <add> MediaType.WILDCARD_TYPE; <ide> <ide> BodyPart bodyPart = null; <ide> if (value instanceof OutputStreamSender) {
JavaScript
apache-2.0
a59affeccc1ba3ced7edb070e15e9650c91f10f6
0
MacdonaldRobinson/FlexDotnetCMS,MacdonaldRobinson/FlexDotnetCMS,MacdonaldRobinson/FlexDotnetCMS
/// <reference path="../Views/MasterPages/WebService.asmx" /> /// <reference path="../Views/MasterPages/WebService.asmx" /> window.onerror = function (e) { if(e.indexOf("UpdatePanel") !=-1) { window.location.reload(); } } function DisplayJsonException(xhr) { try { var jsonError = JSON.parse(xhr.responseText); jQuery.jGrowl(jsonError.Message, { header: "Error", life: 10000 }); } catch (e) { } } function executeAction(action, id, updatePanelId) { switch (action) { case "Refresh": break; case "Create": var createUrl = "Detail.aspx"; $.colorbox({ href: createUrl, width: colorBoxWidth, height: colorBoxHeight, iframe: true, fixed: true, onClosed: function () { __doPostBack(updatePanelId, ''); } }); break; case "Edit": var editUrl = "Detail.aspx?id=" + id; $.colorbox({ href: editUrl, width: colorBoxWidth, height: colorBoxHeight, iframe: true, fixed: true, onClosed: function () { __doPostBack(updatePanelId, ''); } }); break; case "Delete": if (confirm("Are you sure you want to delete this item?")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/DeleteItem", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Deleting Item", "An unexpected error occured while attempting to delete the item."); } }); } break; case "UnDelete": if (confirm("Are you sure you want to un-delete this item?")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/UnDeleteItem", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Un-Deleting Item", "An unexpected error occured while attempting to delete the item."); } }); } break; case "DeletePermanently": if 
(confirm("Are you sure you want to delete this item permanently?")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/DeleteItemPermanently", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Removing Item", "An unexpected error occured while attempting to delete the item."); } }); } break; case "Approve": if (confirm("Are you sure you want to approve this item?")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/ApproveItem", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Approving Item", "An unexpected error occured while attempting to delete the item."); } }); } break; case "TakeOwnership": if (confirm("Are you sure you want to take ownership of all items assigned to the selected user with ID (" + id + ") ?")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/TakeOwnership", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Approving Item", "An unexpected error occured while attempting to delete the item."); } }); } break; case "Reject": if (confirm("Are you sure you want to reject this item? 
Changes will be deleted permanently.")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/RejectItem", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Rejecting Item", "An unexpected error occured while attempting to delete the item."); } }); } break; } } function getParameterByName(name) { name = name.replace(/[\[]/, "\\\[").replace(/[\]]/, "\\\]"); var regexS = "[\\?&]" + name + "=([^&#]*)"; var regex = new RegExp(regexS); var results = regex.exec(window.location.href); if (results == null) return ""; else return decodeURIComponent(results[1].replace(/\+/g, " ")); } var BaseWebserverUrl = BaseUrl + "Admin/Views/MasterPages/Webservice.asmx"; function HandleContextMenuClick(action, target, node) { var mediaDetailId = target.parent().attr("mediadetailid"); var targetText = target.text(); switch (action) { case "CreateChild": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/CreateChild", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { window.location.href = BaseUrl + "Admin/Views/PageHandlers/Media/Create.aspx"; }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "Delete": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/Delete", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { RefreshSiteTreeNodeById(node.parent); //window.location.reload(); //RefreshSiteTreeViewAjaxPanel(); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "UnDelete": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/UnDelete", data: "{'id':'" + mediaDetailId + 
"'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { RefreshSiteTreeNodeById(node.parent); //RefreshSiteTreeViewAjaxPanel(); //window.location.reload(); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "Duplicate": var newName = prompt("Enter a new name for the page"); newName = newName.trim(); if (newName != null) { loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/Duplicate", data: "{'id':'" + mediaDetailId + "', 'duplicateChildren':false,'newName':'" + newName + "'}", contentType: "application/json; charset=utf-8", success: function (msg) { //window.location.href = parentNode.get_navigateUrl(); //RefreshSiteTreeViewAjaxPanel(); RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); } break; case "DuplicateIncludingAllChildren": var newName = prompt("Enter a new name for the page"); newName = newName.trim(); if (newName != null) { loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/Duplicate", data: "{'id':'" + mediaDetailId + "', 'duplicateChildren':true,'newName':'" + newName + "'}", contentType: "application/json; charset=utf-8", success: function (msg) { //RefreshSiteTreeViewAjaxPanel(); RefreshSiteTreeNodeById(node.parent); //window.location.href = msg.d.replace("~", ""); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); } break; case "ShowInMenu": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/ShowInMenu", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { //window.location.reload(); //RefreshSiteTreeViewAjaxPanel(); RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "HideFromMenu": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + 
"/HideFromMenu", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { //window.location.reload(); //RefreshSiteTreeViewAjaxPanel(); RefreshSiteTreeNodeById(node.parent); loadingOverlay.hide(); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "MoveUp": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/MoveUp", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { //window.location.reload(); //RefreshSiteTreeViewAjaxPanel(); RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "MoveDown": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/MoveDown", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { //window.location.reload(); //RefreshSiteTreeViewAjaxPanel(); RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "DeletePermanently": var areYouSure = confirm('Are you sure you want to permanently delete the item "' + targetText + '" and all its associations, including ALL its child items and history if any? 
NOTE: This action is irreversible'); if (areYouSure) { loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/DeletePermanently", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { //window.location.reload(); //RefreshSiteTreeViewAjaxPanel(); RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); } break; case "ClearCache": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/ClearCache", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "ViewFrontEnd": //console.log(target.attr("data-frontendurl")); window.open(target.attr("frontendurl")); break; } } function createAutoCompleteObject(caption, value, meta) { return { caption: caption, value: value, meta: meta }; } function getFieldsAutoComplete() { var wordsArray = []; $("#MainFields label:first-child").each(function () { var labelFor = $(this).attr("for"); if (labelFor != undefined) { var splitFor = $(this).attr("for").split("_"); var text = splitFor[splitFor.length - 1]; //var text = $(this).text().replace(/\s/g, '').replace(":", ""); text = "{" + text + "}"; wordsArray.push(createAutoCompleteObject(text, text, "main")); } }); $(".field > label").each(function () { var text = $(this).attr('data-fieldcode').replace(/\s/g, ''); text = "{Field:" + text + "}"; wordsArray.push(createAutoCompleteObject(text, text, "custom field")); }); wordsArray.push(createAutoCompleteObject( '<Site:GenerateNav', '<Site:GenerateNav runat="server" \ RenderRootMedia="True" \ RootMediaID="2"\ RenderDepth="2"\ DisplayProtectedSections="false" /', 'user control' )); wordsArray.push(createAutoCompleteObject( '<Site:RenderChildren', '<Site:RenderChildren 
runat="server" \ MediaID="0" \ ShowPager="True" \ PageSize="10" \ ChildPropertyName="UseSummaryLayout" \ Where=\'MediaType.Name=="Page"\' \ OrderBy="DateCreated DESC" />', 'user control' )); wordsArray.push(createAutoCompleteObject( '<Site:RenderMedia', '<Site:RenderMedia runat="server" \ MediaID="2" \ PropertyName="UseSummaryLayout" />', 'user control' )); wordsArray.push(createAutoCompleteObject( 'LayoutsTab:RazorIfField', '<!-- LayoutsTab:RazorIfField: Razor Code Showing how to load a field and check its value --> \ @{ \ var field = Model.RenderField("test1"); \ \ <ul>\ @if(field == "True")\ {\ <li>If condition is true</li>\ }\ else\ {\ <li>You entered: @Raw(field)</li>\ }\ </ul>\ }', 'razor code' )); wordsArray.push(createAutoCompleteObject( 'LayoutsTab:RazorLoopAssociatedItems', '<!-- LayoutsTab:RazorLoopAssociatedItems: Razor Code showing how you can load a field and loop through its associated items -->\ @{ \ var field = Model.LoadField("Dropfield");\ \ <ul>\ @foreach(var item in field.FieldAssociations)\ {\ var detail = item.MediaDetail;\ <li><a href="@detail.AbsoluteUrl">@Raw(detail.RenderField("SectionTitle"))</a></li>\ }\ </ul>\ }', 'razor code' )); wordsArray.push(createAutoCompleteObject( 'LayoutsTab:RazorRenderChildren', '<!-- LayoutsTab:RazorRenderChildren: Razor Code to loop through and render child items -->\ @{\ var mediaId = Model.MediaID; // You can change this to any Media ID to load the children of that page\ var media = MediasMapper.GetByID(mediaId);\ \ if(media != null)\ {\ var mediaDetail = media.GetLiveMediaDetail();\ \ if(mediaDetail != null)\ {\ var childItems = mediaDetail.ChildMediaDetails;\ <ul>\ @foreach(var child in childItems)\ {\ <li><a href="@child.AbsoluteUrl">@Raw(child.RenderField("SectionTitle"))</a></li>\ }\ </ul>\ }\ }\ }', 'razor code' )); return wordsArray; } function launchIntoFullscreen(element) { //element = $(element).parent()[0]; if (element.requestFullscreen) { element.requestFullscreen(); } else if 
(element.mozRequestFullScreen) { element.mozRequestFullScreen(); } else if (element.webkitRequestFullscreen) { element.webkitRequestFullscreen(); } else if (element.msRequestFullscreen) { element.msRequestFullscreen(); } } function initAceEditors() { var wordList = getFieldsAutoComplete(); $(document).on("change", "#AttachEditorToBrowserPanel", function () { if(!$(this).is(":checked")) { if ($("#PreviewPanel").length > 0) $("#PreviewPanel")[0].src = $("#PreviewPanel")[0].src; } else { var textarea = $(this).parent().find("textarea"); var value = textarea.val(); if ($("#PreviewPanel").length > 0) $("#PreviewPanel")[0].contentWindow.document.body.innerHTML = value; } }); $(document).on("click", ".AceEditorFullScreen", function () { var element = $(this).parent().find(".ace_editor")[0]; /*var mainArea = $("#mainArea")[0]; if (mainArea != undefined || mainArea != null) { element = mainArea; }*/ launchIntoFullscreen(element); }); $(".AceEditor").each(function () { var textarea = $(this); var id = $(this).attr("id"); var editorId = $(this).attr("name") + "-editor"; if (document.getElementById(editorId) != null) return; if ($("#PreviewPanel").length > 0) { if ($(this).hasClass("CanAttachToBrowserPanel")) { if ($("#" + id).parent().find("#AttachEditorToBrowserPanel").length == 0) { $("#" + id).parent().prepend("<input type='checkbox' id='AttachEditorToBrowserPanel' /> Attach editor to browser panel"); } } } if ($("#" + id).parent().find(".AceEditorFullScreen").length == 0) { $("#" + id).parent().prepend("<a class='AceEditorFullScreen' href='#' data-editorid='" + editorId +"'>View Full Screen</a><br />"); } var style = $(this).attr("style"); $(this).parent().append("<div id='" + editorId + "' class='ace-editor' style='" + style + "'></div>"); var editor = ace.edit(editorId); textarea.hide(); editor.on('focus', function () { editor.getSession().setUseWorker(true); }); editor.on('blur', function () { editor.getSession().setUseWorker(false); }); var session = 
editor.getSession(); session.setUseWorker(false); editor.setTheme("ace/theme/iplastic"); editor.setValue(textarea.val(), 1); session.setMode("ace/mode/html"); editor.$blockScrolling = Infinity; editor.$useWorker = false; //editor.clearSelection(); var langTools = ace.require('ace/ext/language_tools'); // enable autocompletion and snippets editor.setOptions({ enableBasicAutocompletion: true, enableSnippets: true, enableLiveAutocompletion: false, showPrintMargin: false, }); var customCompleter = { getCompletions: function (editor, session, pos, prefix, callback) { callback(null, wordList.map(function (autoCompleteObject) { return { caption: autoCompleteObject.caption, value: autoCompleteObject.value, meta: autoCompleteObject.meta }; })); } } editor.completers = [langTools.snippetCompleter, langTools.textCompleter, customCompleter] var htmlBeautifyOptions = { }; editor.commands.addCommand({ name: 'Beautify', bindKey: { win: 'Ctrl-S', mac: 'Command-S' }, exec: function (editor) { var value = editor.getSession().getValue(); // TODO: Format HTML value = value.replace(/<[^]+/, function (match) { /*if (/@for|@if|@[\s]?{|Helper.|!=|List</.test(match)) { return match; }*/ match = html_beautify(match); return match; }); editor.setValue(value, 1); $(".SavePageButton")[0].click(); }, readOnly: true // false if this command should not apply in readOnly mode }); editor.getSession().on('change', function () { var value = editor.getSession().getValue(); textarea.val(value); if ($("#PreviewPanel").length > 0) { if (textarea.parent().find("#AttachEditorToBrowserPanel").is(":checked")) { $("#PreviewPanel")[0].contentWindow.document.body.innerHTML = value; } } }); }); } function destroyAceEditors() { $(".AceEditor").each(function () { var editorId = $(this).attr("name") + "-editor"; var editor = ace.edit(editorId); editor.destroy(); }); } function destroyTinyMCE() { if (typeof (tinyMCE) !== 'undefined') { var length = tinymce.editors.length; for (var i = length; i > 0; i--) { var 
editor = tinymce.editors[i - 1]; editor.remove(); }; } } $(window).load(function () { //console.log("window load"); //setTimeout(function () { initAceEditors(); initTinyMCE(); //}, 1000); $(document).ajaxComplete(function (event, xhr, settings) { //console.log("ajaxComplete"); if (settings.url.indexOf("Chat.asmx") == -1) { //setTimeout(function () { initAceEditors(); initTinyMCE(); BindJQueryUIControls(); //}, 1000); } }); }); function BindMultiFileUploaderImageLoadError() { $(".MultiFileUploader img").error(function () { $(this).attr("src", BaseUrl + "media/images/icons/File.jpg"); }); } function BindTabs() { $('.tabs').tabs(); } function BindJQueryUIControls() { try { $("select").selectmenu({ change: function (event, ui) { $(event.target).trigger("change"); } }); //$("[class='RadioButtonList']").each(function () { // var inputItems = $(this).find("input"); // var index = $("[class='RadioButtonList']").index(this); // var name = "RadioButtonListItem_" + index; // inputItems.attr("type", "radio") // inputItems.attr("name", name); //}); //$("fieldset input[type=checkbox], fieldset input[type=radio]").checkboxradio(); } catch (ex) { } } $(document).ready(function () { BindJQueryUIControls(); //BindScrollMagic(); $('.tooltip').each(function () { var title = $(this).attr("title"); if(title == undefined || title == "") { $(this).hide(); } }); $('.tooltip').tooltipster({ contentAsHTML: true, interactive: true, maxWidth: 500, trigger: 'click' }); $('ul.sf-menu').superfish(); BindTabs(); $('input.datetimepicker').datetimepicker({ controlType: 'select', oneLine: true, dateFormat: 'yy-mm-dd', timeFormat: 'hh:mm:ss TT' }); $(document).on("keydown", function (event) { if (event.ctrlKey || event.metaKey) { switch (String.fromCharCode(event.which).toLowerCase()) { case 's': $('.SavePageButton')[0].click(); event.preventDefault(); break; } } }); $(document).on('click', '.SavePageButton', function (event) { $(".save-template").click(); var text = $(".SaveFieldButton").text(); if 
(text.indexOf("Save") != -1) { var autoClickedSaveFieldButton = false; OnUpdatePanelRefreshComplete(function (event) { if (!autoClickedSaveFieldButton) { $(".SaveFieldButton")[0].click(); autoClickedSaveFieldButton = true; //ReloadPreviewPanel(); } }); } return true; }); }); function initTinyMCE() { tinymce.editors = []; tfm_path = BaseUrl + "Scripts/tinyfilemanager.net"; tinymce.init({ selector: ".editor", content_css: BaseUrl + "Views/MasterPages/SiteTemplates/css/main.css, " + BaseUrl + "Admin/Styles/editor.css", menubar: false, plugins: [ 'advlist autolink lists link image charmap print preview hr anchor pagebreak', 'searchreplace wordcount visualblocks visualchars fullscreen', 'insertdatetime media youtube nonbreaking save table contextmenu directionality', 'emoticons template paste textcolor colorpicker textpattern imagetools ace imgmap table map' ], toolbar1: 'undo redo | paste pastetext | bold italic underline strikethrough superscript subscript charmap emoticons | formatselect blockquote | alignleft aligncenter alignright alignjustify outdent indent | bullist numlist | insert table | anchor link image imgmap media youtube map | visualblocks ace', templates: [ ], image_advtab: true, relative_urls: false, convert_urls: false, remove_script_host: false, verify_html: false, valid_children: '+a[div|p|ul|ol|li|h1|span|h2|h3|h4|h5|h5|h6]', extended_valid_elements: 'span[*],a[*],+iframe[src|width|height|name|align|class]', custom_shortcuts: false, setup: function (editor) { editor.on('change', function () { editor.save(); }); editor.on('keydown', function (event) { if (event.ctrlKey || event.metaKey) { switch (String.fromCharCode(event.which).toLowerCase()) { case 's': $('.SavePageButton')[0].click(); event.preventDefault(); break; } } }); } }); } function BindGridViewSortable(CssSelector, WebserviceUrl, UpdatePanelClientId, OnAfterRefreshFunction) { var DragDropGridSortable = $(CssSelector).sortable({ items: 'tr:not(tr:first-child)', cursor: 'crosshair', 
connectWith: '.DragDropGrid', axis: 'y', dropOnEmpty: true, receive: function (e, ui) { $(this).find("tbody").append(ui.item); }, update: function (event, ui) { var ths = $(this).find("th"); var trs = $(this).find("tr:not(tr:first-child)"); var cols = []; var entries = {}; ths.each(function () { var text = $.trim($(this).text()); if (text != "") cols.push(text); }); var entries = ""; var trsIndex = 0; trs.each(function () { var tds = $(this).find("td"); var propIndex = 0; var properties = ""; tds.each(function () { var col = cols[propIndex]; if (col != undefined) { properties += "\"" + col + "\":\"" + $.trim($(this).text()) + "\""; propIndex++; if (cols[propIndex] != undefined) properties += ","; } }); trsIndex++; var entry = "{" + properties + "}"; if (trs[trsIndex] != undefined) entry += ","; entries += entry; }); var jsonString = "{\"items\":[" + entries + "]}"; if (WebserviceUrl != "") { jQuery.ajax({ type: "POST", url: WebserviceUrl, data: jsonString, contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { var isFunc = jQuery.isFunction(OnAfterRefreshFunction); RefreshUpdatePanel(UpdatePanelClientId, function () { BindGridViewSortable(CssSelector, WebserviceUrl, UpdatePanelClientId); if (isFunc) { OnAfterRefreshFunction(); } }); }, error: function (xhr, status, error) { } }); } else { OnAfterRefreshFunction(); } } }); } $(function () { $('div.split-pane').splitPane(); //$("#SiteTree ul").sortable({ connectWith: "ul" }); //BindTree(); $(document).on('click', '#SiteTree a', function () { window.location.href = $(this).attr("href"); }); }); function RefreshSiteTreeParentNode() { var selected = $("#SiteTree").jstree("get_selected"); if (selected.length > 0) { var selectedId = selected[0]; var parentId = $("#SiteTree").jstree(true).get_parent(selectedId); RefreshSiteTreeNodeById(parentId); } } function RefreshSiteTreeSelectedNode() { var selected = $("#SiteTree").jstree("get_selected"); if (selected.length > 0) { var selectedId = 
selected[0]; RefreshSiteTreeNodeById(selectedId); } } function RefreshSiteTreeNodeById(nodeId) { var jsTree = $('#SiteTree').jstree(true); if (jsTree != false) jsTree.refresh_node(nodeId); } function ConvertDivToSiteTree(divSelector, targetSelector, mediaId) { var jsTree = $(divSelector).jstree(true); var filterText = ""; if (jsTree != false) { jsTree.destroy(); } $(divSelector).jstree({ "plugins": ["checkbox"], "checkbox": { "keep_selected_style": false }, 'types': { 'default': { 'icon': 'jstree-icon jstree-file' } }, "core": { // so that create works "multiple": true, "cascade":"", 'data': { 'url': function (node) { if (filterText == "" || filterText == undefined || filterText == null) return node.id === '#' ? BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/GetRootNodes' : BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/GetChildNodes'; else return BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/SearchForNodes?filterText=' + filterText; }, 'data': function (node) { return { 'id': node.id }; } } } }).on('ready.jstree', function (e, data) { data.instance.uncheck_all(); }).on("changed.jstree", function (node, action, selected, event) { var checkedItems = action.instance.get_checked(); $(targetSelector).val(JSON.stringify(checkedItems)); }) } function BindTree(filterText) { var jsTree = $('#SiteTree').jstree(true); if (jsTree != false) { jsTree.destroy(); } $('#SiteTree').jstree({ 'types': { 'default': { 'icon': 'jstree-icon jstree-file' } }, "core": { // so that create works "check_callback": true, "multiple": false, 'data': { 'url': function (node) { if (filterText == "" || filterText == undefined || filterText == null) return node.id === '#' ? 
BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/GetRootNodes' : BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/GetChildNodes'; else return BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/SearchForNodes?filterText=' + filterText; }, 'data': function (node) { return { 'id': node.id }; } } }, "plugins": ["contextmenu", "dnd", "types"], "contextmenu": { "items": function (node) { return { "Create": { "label": "Create Child", "action": function (obj) { //this.create(obj); HandleContextMenuClick("CreateChild", obj.reference, node); } }, "Delete": { "label": "Mark As Deleted", "action": function (obj) { HandleContextMenuClick("Delete", obj.reference, node); } }, "UnDelete": { "label": "Restore", "action": function (obj) { HandleContextMenuClick("UnDelete", obj.reference, node); } }, "ShowInMenu": { "label": "Show In Menu", "action": function (obj) { HandleContextMenuClick("ShowInMenu", obj.reference, node); } }, "HideFromMenu": { "label": "Hide From Menu", "action": function (obj) { HandleContextMenuClick("HideFromMenu", obj.reference, node); } }, "MoveUp": { "label": "Move Up", "action": function (obj) { HandleContextMenuClick("MoveUp", obj.reference, node); } }, "MoveDown": { "label": "Move Down", "action": function (obj) { HandleContextMenuClick("MoveDown", obj.reference, node); } }, "ClearCache": { "label": "Clear Cache", "action": function (obj) { HandleContextMenuClick("ClearCache", obj.reference, node); } }, "ViewFrontEnd": { "label": "View Front End", "action": function (obj) { HandleContextMenuClick("ViewFrontEnd", obj.reference, node); } }, "Duplicate": { "label": "Duplicate", "action": function (obj) { HandleContextMenuClick("Duplicate", obj.reference, node); } }, "DuplicateIncludingAllChildren": { "label": "Duplicate + Children", "action": function (obj) { HandleContextMenuClick("DuplicateIncludingAllChildren", obj.reference, node); } }, "DeletePermanently": { "label": "Delete Permanently", "action": function (obj) { 
HandleContextMenuClick("DeletePermanently", obj.reference, node); } } }; } } }).on('move_node.jstree', function (e, data) { var sourceMediaId = data.node.id; var parentNode = $("#" + data.parent); var parentMediaId = parentNode.attr("id"); var newPosition = data.position; jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/HandleNodeDragDrop", data: "{'sourceMediaId':'" + sourceMediaId + "', 'parentMediaId':'" + parentMediaId + "', 'newPosition':'" + newPosition + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { //RefreshSiteTreeViewAjaxPanel(); RefreshSiteTreeNodeById(parentMediaId); }, error: function (xhr, status, error) { DisplayJsonException(xhr); RefreshSiteTreeNodeById(parentMediaId); } }); }); } $(document) .on('dnd_move.vakata', function (e, data) { var target = $(data.event.target); var dropZone = null; var parentDropZone = target.parents("ul.dropZone"); if (target.hasClass("ul.dropZone")) { dropZone = target; } else if (parentDropZone.length > 0) { dropZone = parentDropZone; } return true; }) .on('dnd_stop.vakata', function (e, data) { var elem = $(data.element); var target = $(data.event.target); var tagName = target.prop("tagName").toLowerCase(); if (tagName != "ul") { target = target.parents("ul"); } var isDropZone = target.hasClass("dropZone") if (isDropZone) { var mediaDetailId = elem.parent().attr("mediadetailid"); var text = elem.text(); var href = elem.attr("href") + "&masterFilePath=~/Admin/Views/MasterPages/Popup.Master"; var li = "<li mediadetailid='" + mediaDetailId + "'><a class='delete'>x</a><span class='text'>" + text + "</span><a class='edit colorbox iframe' href='" + href + "'>Edit</a></li>"; if (target.find("li[mediadetailid='" + mediaDetailId + "']").length == 0) { target.append(li); UpdateValuesFromUL(target); } } }); function pageLoad(sender, args) { BindJQueryUIControls(); /*RefreshSiteTreeNodeById($("#SiteTree").jstree("get_selected")[0]); BindScrollMagic(); BindDataTable(); 
BindSortable(); BindTabs(); BindMultiFileUploaderImageLoadError(); if (MasterPage != undefined && MasterPage.indexOf("FieldEditor") == -1) { initTinyMCE(); setTimeout(function () { initAceEditors(); }, 1000) } if (typeof (BindActiveTabs) == 'function') BindActiveTabs();*/ } function BindDataTable() { //$('.DataTable').DataTable({ // dom: 'Bfrtip', // buttons: [ // { // extend: 'csvHtml5', // title: 'Data export' // } // ] //}); } function RefreshAdminUpdatePanel(elem) { RefreshUpdatePanel(elem, function () { BindSortable(); }); } $(document).ready(function () { init(); BindMultiFileUploaderImageLoadError(); }); function BindSortable() { $(".dropZone.sortable").sortable({ tolerance: "pointer", connectWith: '.dropZone.sortable', update: function (event, ui) { } }); $(".MultiFileUploader .sortable").sortable({ tolerance: "pointer", update: function (event, ui) { var arr = new Array(); $(this).children("li").each(function () { var id = $(this).attr("data-id"); arr.push(id); }); var root = $(this).parents(".MultiFileUploader"); root.find(".ReorderItems").val(JSON.stringify(arr)); } }); } function BindScrollMagic() { if(window.navigator.appVersion.indexOf("Trident") == -1) ScrollMagicSetup(".SavePanel"); ScrollMagicSetup("#SaveFields"); } var controllerScenesArray = []; function ScrollMagicSetup(selector) { var controller = null; var scene = null; var myObject = {}; var newEntry = true; $(controllerScenesArray).each(function () { if (this.selector == selector) { myObject = this; myObject.controller.destroy(); myObject.scene.destroy(); newEntry = false; return; } }); myObject.selector = selector; myObject.controller = controller = new ScrollMagic.Controller(); myObject.scene = new ScrollMagic.Scene({ offset: -50, triggerElement: selector, triggerHook: 0 }) .setPin(selector) .addTo(controller); if (newEntry) { controllerScenesArray.push(myObject); } } function ReloadPreviewPanel() { if ($("#PreviewPanel").length > 0) $("#PreviewPanel")[0].src = $("#PreviewPanel")[0].src; } 
$(function () { $(':text').bind('keydown', function (e) { //on keydown for all textboxes if(e.target.className != "searchtextbox") { if (e.keyCode == 13) { //if this is enter key e.preventDefault(); return false; } else return true; } else return true; }); }); function UpdateULFromValues(elem) { var values = $(elem).find("input[type='hidden']"); //console.log(values); var json = eval(values.val()); /*var valsArray = vals.split(","); for (item of valsArray) { console.log(item); }*/ $(json).each(function () { //console.log(this); $(elem).append("<li mediadetailid='" + this.id + "'><a class='delete'>x</a><span class='text'>" + this.name + "</span><a class='edit colorbox iframe' href='" + this.adminUrl + "'>Edit</a></li>"); }); } function UpdateValuesFromUL(elem) { var values = $(elem).find("input[type='hidden']"); var arr = new Array(); $(elem).children("li:not(.hidden)").each(function () { var mediadetailid = $(this).attr("mediadetailid"); var adminUrl = $(this).find("a.edit").attr("href").replace(window.location.origin, ""); if (adminUrl.indexOf("masterFilePath") == -1) { adminUrl = adminUrl + "&masterFilePath=~/Admin/Views/MasterPages/Popup.Master"; } var name = $(this).children("span.text").text(); if (name != "") { var obj = new Object(); obj.name = name; obj.id = mediadetailid; obj.adminUrl = adminUrl; arr.push(obj); } }); var jsonString = JSON.stringify(arr); values.val(jsonString); } function init() { BindTree(); BindSortable(); BindScrollMagic(); $("#Filter").on("keyup", function (e) { var code = e.keyCode || e.which; if (code == 13) { var text = $(this).val(); BindTree(text); } }); $("ul.dropZone").each(function () { UpdateULFromValues(this); }); $(document).on("click", ".dropZone li a.delete", function () { var elem = $(this).closest(".dropZone")[0]; $(this).closest("li").remove(); UpdateValuesFromUL(elem); }); $("ul.dropZone").sortable({ update: function (event, ui) { UpdateValuesFromUL(this); } }); //TODO /* $(document).on("click", ".dropZone li 
a.delete", function () { var elem = $(this).closest(".dropZone")[0]; $(this).closest("li").remove(); UpdateValuesFromUL(elem); }); $("ul.dropZone").bind("DOMSubtreeModified", function () { UpdateValuesFromUL(this); }); $("ul.dropZone").sortable({ update: function (event, ui) { UpdateValuesFromUL(this); } });*/ $(document).on("click", ".MultiFileUploader .DeleteItem", function () { var root = $(this).closest(".MultiFileUploader"); var parentItem = $(this).closest(".item"); var itemsToDelete = root.find(".ItemsToDelete"); var image = parentItem.find("img"); if (image.length == 0) { image = $(this); } var itemId = $(this).attr('data-id'); var itemsToDeleteJson = JSON.parse(itemsToDelete.val()); var src = image.attr("src"); //console.log(itemsToDeleteJson); if (!image.hasClass("MarkedAsDeleted")) { image.addClass("MarkedAsDeleted"); if (itemsToDeleteJson.indexOf(itemId) == -1) { itemsToDeleteJson.push(itemId); } itemsToDelete.val(JSON.stringify(itemsToDeleteJson)); $(this).text("UnDelete"); } else { image.removeClass("MarkedAsDeleted"); var index = itemsToDeleteJson.indexOf(itemId) if (index != -1) { itemsToDeleteJson.splice(index, 1); //itemsToDeleteJson.push(itemId); } itemsToDelete.val(JSON.stringify(itemsToDeleteJson)); $(this).text("Delete"); } }); $(document).on("change", ".MultiFileUpload", function () { if (typeof (FileReader) != "undefined") { var root = $(this).parents(".MultiFileUploader"); var dvPreview = root.find(".dvPreview"); var uploadFilesNowButtons = root.find(".UploadFilesNowButtons"); dvPreview.html(""); var regex = /^([a-zA-Z0-9\s_\\.\-:])+(.jpg|.jpeg|.gif|.png|.bmp|.pdf|.csv|.docx|.doc)$/; $($(this)[0].files).each(function () { var file = $(this); if (regex.test(file[0].name.toLowerCase())) { var reader = new FileReader(); reader.onload = function (e) { if (file[0].type.indexOf("image") != -1) { var img = $("<img />"); img.attr("style", "width: 100px; height: 100px;"); img.attr("src", e.target.result); dvPreview.append(img); } else { var link = 
$("<a>" + file[0].name + "</a>"); link.attr("href", e.target.result); dvPreview.append(link); } } reader.readAsDataURL(file[0]); } else { alert(file[0].name + " is not a valid image file."); dvPreview.html(""); return false; } uploadFilesNowButtons.show(); }); } else { alert("This browser does not support HTML5 FileReader."); } }); }
WebApplication/Admin/Scripts/adminGlobal.js
// adminGlobal.js — admin-side glue for an ASP.NET WebForms CMS:
// grid row actions (ItemServices.asmx), site-tree context menu + drag/drop
// (WebService.asmx + jsTree), TinyMCE / Ace editor wiring, jQuery UI bindings,
// sticky save panel (ScrollMagic) and assorted page bootstrap.
/// <reference path="../Views/MasterPages/WebService.asmx" /> /// <reference path="../Views/MasterPages/WebService.asmx" />

// Global error hook: a script error mentioning "UpdatePanel" usually means a
// partial postback got out of sync, so recover by reloading the whole page.
// NOTE(review): `e` is only a string for string-message errors — confirm no
// caller passes an ErrorEvent here.
window.onerror = function (e) { if(e.indexOf("UpdatePanel") !=-1) { window.location.reload(); } }

// Shows the `Message` of a JSON-formatted error response as a jGrowl toast.
// Non-JSON responses are silently ignored.
function DisplayJsonException(xhr) { try { var jsonError = JSON.parse(xhr.responseText); jQuery.jGrowl(jsonError.Message, { header: "Error", life: 10000 }); } catch (e) { } }

// Dispatches a grid toolbar/row action. Create/Edit open Detail.aspx in a
// colorbox; the remaining actions POST the item id to ItemServices.asmx and
// refresh the given UpdatePanel on success.
// NOTE(review): eval(msg.d) executes whatever script the server returns —
// confirm the service is trusted before keeping this pattern.
function executeAction(action, id, updatePanelId) { switch (action) { case "Refresh": break; case "Create": var createUrl = "Detail.aspx"; $.colorbox({ href: createUrl, width: colorBoxWidth, height: colorBoxHeight, iframe: true, fixed: true, onClosed: function () { __doPostBack(updatePanelId, ''); } }); break; case "Edit": var editUrl = "Detail.aspx?id=" + id; $.colorbox({ href: editUrl, width: colorBoxWidth, height: colorBoxHeight, iframe: true, fixed: true, onClosed: function () { __doPostBack(updatePanelId, ''); } }); break; case "Delete": if (confirm("Are you sure you want to delete this item?")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/DeleteItem", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Deleting Item", "An unexpected error occured while attempting to delete the item."); } }); } break; case "UnDelete": if (confirm("Are you sure you want to un-delete this item?")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/UnDeleteItem", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Un-Deleting Item", "An unexpected error occured while attempting to delete the item."); } }); } break; case "DeletePermanently": if (confirm("Are you sure you want to delete this item permanently?")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/DeleteItemPermanently", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Removing Item", "An unexpected error occured while attempting to delete the item."); } }); } break; case "Approve": if (confirm("Are you sure you want to approve this item?")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/ApproveItem", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Approving Item", "An unexpected error occured while attempting to delete the item."); } }); } break; case "TakeOwnership": if (confirm("Are you sure you want to take ownership of all items assigned to the selected user with ID (" + id + ") ?")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/TakeOwnership", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Approving Item", "An unexpected error occured while attempting to delete the item."); } }); } break; case "Reject": if (confirm("Are you sure you want to reject this item? Changes will be deleted permanently.")) { jQuery.ajax({ type: "POST", url: "ItemServices.asmx/RejectItem", data: "{'id':'" + id + "'}", contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { eval(msg.d); if (msg.d.indexOf('Error') == -1) __doPostBack(updatePanelId, ''); }, error: function (xhr, status, error) { DisplayErrorMessage("Error Rejecting Item", "An unexpected error occured while attempting to delete the item."); } }); } break; } }

// Returns the value of a query-string parameter from the current URL
// (decoded, '+' treated as space), or "" when absent.
function getParameterByName(name) { name = name.replace(/[\[]/, "\\\[").replace(/[\]]/, "\\\]"); var regexS = "[\\?&]" + name + "=([^&#]*)"; var regexp = new RegExp(regexS); var results = regexp.exec(window.location.href); if (results == null) return ""; else return decodeURIComponent(results[1].replace(/\+/g, " ")); }

// Base endpoint for the site-tree web service (BaseUrl is defined elsewhere).
var BaseWebserverUrl = BaseUrl + "Admin/Views/MasterPages/Webservice.asmx";

// Handles a site-tree context-menu action. `target` is the clicked anchor
// (its parent carries the mediadetailid attribute), `node` is the jsTree node;
// most actions POST the id to WebService.asmx and refresh the parent node.
// NOTE(review): in the Duplicate cases, prompt() returns null when the user
// cancels, so newName.trim() throws before the null check — confirm and fix.
function HandleContextMenuClick(action, target, node) { var mediaDetailId = target.parent().attr("mediadetailid"); var targetText = target.text(); switch (action) { case "CreateChild": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/CreateChild", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { window.location.href = BaseUrl + "Admin/Views/PageHandlers/Media/Create.aspx"; }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "Delete": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/Delete", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { RefreshSiteTreeNodeById(node.parent);
//window.location.reload();
//RefreshSiteTreeViewAjaxPanel();
}, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "UnDelete": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/UnDelete", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { RefreshSiteTreeNodeById(node.parent);
//RefreshSiteTreeViewAjaxPanel();
//window.location.reload();
}, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "Duplicate": var newName = prompt("Enter a new name for the page"); newName = newName.trim(); if (newName != null) { loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/Duplicate", data: "{'id':'" + mediaDetailId + "', 'duplicateChildren':false,'newName':'" + newName + "'}", contentType: "application/json; charset=utf-8", success: function (msg) {
//window.location.href = parentNode.get_navigateUrl();
//RefreshSiteTreeViewAjaxPanel();
RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); } break; case "DuplicateIncludingAllChildren": var newName = prompt("Enter a new name for the page"); newName = newName.trim(); if (newName != null) { loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/Duplicate", data: "{'id':'" + mediaDetailId + "', 'duplicateChildren':true,'newName':'" + newName + "'}", contentType: "application/json; charset=utf-8", success: function (msg) {
//RefreshSiteTreeViewAjaxPanel();
RefreshSiteTreeNodeById(node.parent);
//window.location.href = msg.d.replace("~", "");
}, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); } break; case "ShowInMenu": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/ShowInMenu", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) {
//window.location.reload();
//RefreshSiteTreeViewAjaxPanel();
RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "HideFromMenu": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/HideFromMenu", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) {
//window.location.reload();
//RefreshSiteTreeViewAjaxPanel();
RefreshSiteTreeNodeById(node.parent); loadingOverlay.hide(); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "MoveUp": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/MoveUp", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) {
//window.location.reload();
//RefreshSiteTreeViewAjaxPanel();
RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "MoveDown": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/MoveDown", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) {
//window.location.reload();
//RefreshSiteTreeViewAjaxPanel();
RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "DeletePermanently": var areYouSure = confirm('Are you sure you want to permanently delete the item "' + targetText + '" and all its associations, including ALL its child items and history if any? NOTE: This action is irreversible'); if (areYouSure) { loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/DeletePermanently", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) {
//window.location.reload();
//RefreshSiteTreeViewAjaxPanel();
RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); } break; case "ClearCache": loadingOverlay.show(); jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/ClearCache", data: "{'id':'" + mediaDetailId + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) { RefreshSiteTreeNodeById(node.parent); }, error: function (xhr, status, error) { DisplayJsonException(xhr); } }); break; case "ViewFrontEnd":
//console.log(target.attr("data-frontendurl"));
window.open(target.attr("frontendurl")); break; } }

// Small factory for an Ace autocomplete entry.
function createAutoCompleteObject(caption, value, meta) { return { caption: caption, value: value, meta: meta }; }

// Builds the Ace autocomplete word list: {FieldName} tokens taken from the
// main-field labels, {Field:...} tokens from custom-field labels, plus a set
// of canned user-control and Razor snippets.
function getFieldsAutoComplete() { var wordsArray = []; $("#MainFields label:first-child").each(function () { var labelFor = $(this).attr("for"); if (labelFor != undefined) { var splitFor = $(this).attr("for").split("_"); var text = splitFor[splitFor.length - 1];
//var text = $(this).text().replace(/\s/g, '').replace(":", "");
text = "{" + text + "}"; wordsArray.push(createAutoCompleteObject(text, text, "main")); } }); $(".field > label").each(function () { var text = $(this).attr('data-fieldcode').replace(/\s/g, ''); text = "{Field:" + text + "}"; wordsArray.push(createAutoCompleteObject(text, text, "custom field")); }); wordsArray.push(createAutoCompleteObject( '<Site:GenerateNav', '<Site:GenerateNav runat="server" \ RenderRootMedia="True" \ RootMediaID="2"\ RenderDepth="2"\ DisplayProtectedSections="false" /', 'user control' )); wordsArray.push(createAutoCompleteObject( '<Site:RenderChildren', '<Site:RenderChildren runat="server" \ MediaID="0" \ ShowPager="True" \ PageSize="10" \ ChildPropertyName="UseSummaryLayout" \ Where=\'MediaType.Name=="Page"\' \ OrderBy="DateCreated DESC" />', 'user control' )); wordsArray.push(createAutoCompleteObject( '<Site:RenderMedia', '<Site:RenderMedia runat="server" \ MediaID="2" \ PropertyName="UseSummaryLayout" />', 'user control' )); wordsArray.push(createAutoCompleteObject( 'LayoutsTab:RazorIfField', '<!-- LayoutsTab:RazorIfField: Razor Code Showing how to load a field and check its value --> \ @{ \ var field = Model.RenderField("test1"); \ \ <ul>\ @if(field == "True")\ {\ <li>If condition is true</li>\ }\ else\ {\ <li>You entered: @Raw(field)</li>\ }\ </ul>\ }', 'razor code' )); wordsArray.push(createAutoCompleteObject( 'LayoutsTab:RazorLoopAssociatedItems', '<!-- LayoutsTab:RazorLoopAssociatedItems: Razor Code showing how you can load a field and loop through its associated items -->\ @{ \ var field = Model.LoadField("Dropfield");\ \ <ul>\ @foreach(var item in field.FieldAssociations)\ {\ var detail = item.MediaDetail;\ <li><a href="@detail.AbsoluteUrl">@Raw(detail.RenderField("SectionTitle"))</a></li>\ }\ </ul>\ }', 'razor code' )); wordsArray.push(createAutoCompleteObject( 'LayoutsTab:RazorRenderChildren', '<!-- LayoutsTab:RazorRenderChildren: Razor Code to loop through and render child items -->\ @{\ var mediaId = Model.MediaID; // You can change this to any Media ID to load the children of that page\ var media = MediasMapper.GetByID(mediaId);\ \ if(media != null)\ {\ var mediaDetail = media.GetLiveMediaDetail();\ \ if(mediaDetail != null)\ {\ var childItems = mediaDetail.ChildMediaDetails;\ <ul>\ @foreach(var child in childItems)\ {\ <li><a href="@child.AbsoluteUrl">@Raw(child.RenderField("SectionTitle"))</a></li>\ }\ </ul>\ }\ }\ }', 'razor code' )); return wordsArray; }

// Requests fullscreen for an element, trying each vendor-prefixed API.
function launchIntoFullscreen(element) {
//element = $(element).parent()[0];
if (element.requestFullscreen) { element.requestFullscreen(); } else if (element.mozRequestFullScreen) { element.mozRequestFullScreen(); } else if (element.webkitRequestFullscreen) { element.webkitRequestFullscreen(); } else if (element.msRequestFullscreen) { element.msRequestFullscreen(); } }

// Attaches an Ace editor to every .AceEditor textarea (once per textarea):
// adds a full-screen link and optional "attach to browser panel" checkbox,
// mirrors edits back into the hidden textarea / #PreviewPanel, installs the
// field-token autocompleter and a Ctrl-S "beautify + save" command.
function initAceEditors() { var wordList = getFieldsAutoComplete(); $(document).on("change", "#AttachEditorToBrowserPanel", function () { if(!$(this).is(":checked")) { if ($("#PreviewPanel").length > 0) $("#PreviewPanel")[0].src = $("#PreviewPanel")[0].src; } else { var textarea = $(this).parent().find("textarea"); var value = textarea.val(); if ($("#PreviewPanel").length > 0) $("#PreviewPanel")[0].contentWindow.document.body.innerHTML = value; } }); $(document).on("click", ".AceEditorFullScreen", function () { var element = $(this).parent().find(".ace_editor")[0]; /*var mainArea = $("#mainArea")[0]; if (mainArea != undefined || mainArea != null) { element = mainArea; }*/ launchIntoFullscreen(element); }); $(".AceEditor").each(function () { var textarea = $(this); var id = $(this).attr("id"); var editorId = $(this).attr("name") + "-editor"; if (document.getElementById(editorId) != null) return; if ($("#PreviewPanel").length > 0) { if ($(this).hasClass("CanAttachToBrowserPanel")) { if ($("#" + id).parent().find("#AttachEditorToBrowserPanel").length == 0) { $("#" + id).parent().prepend("<input type='checkbox' id='AttachEditorToBrowserPanel' /> Attach editor to browser panel"); } } } if ($("#" + id).parent().find(".AceEditorFullScreen").length == 0) { $("#" + id).parent().prepend("<a class='AceEditorFullScreen' href='#' data-editorid='" + editorId +"'>View Full Screen</a><br />"); } var style = $(this).attr("style"); $(this).parent().append("<div id='" + editorId + "' class='ace-editor' style='" + style + "'></div>"); var editor = ace.edit(editorId); textarea.hide(); editor.on('focus', function () { editor.getSession().setUseWorker(true); }); editor.on('blur', function () { editor.getSession().setUseWorker(false); }); var session = editor.getSession(); session.setUseWorker(false); editor.setTheme("ace/theme/iplastic"); editor.setValue(textarea.val(), 1); session.setMode("ace/mode/html"); editor.$blockScrolling = Infinity; editor.$useWorker = false;
//editor.clearSelection();
var langTools = ace.require('ace/ext/language_tools');
// enable autocompletion and snippets
editor.setOptions({ enableBasicAutocompletion: true, enableSnippets: true, enableLiveAutocompletion: false, showPrintMargin: false, }); var customCompleter = { getCompletions: function (editor, session, pos, prefix, callback) { callback(null, wordList.map(function (autoCompleteObject) { return { caption: autoCompleteObject.caption, value: autoCompleteObject.value, meta: autoCompleteObject.meta }; })); } } editor.completers = [langTools.snippetCompleter, langTools.textCompleter, customCompleter] var htmlBeautifyOptions = { }; editor.commands.addCommand({ name: 'Beautify', bindKey: { win: 'Ctrl-S', mac: 'Command-S' }, exec: function (editor) { var value = editor.getSession().getValue();
// TODO: Format HTML
value = value.replace(/<[^]+/, function (match) { /*if (/@for|@if|@[\s]?{|Helper.|!=|List</.test(match)) { return match; }*/ match = html_beautify(match); return match; }); editor.setValue(value, 1); $(".SavePageButton")[0].click(); }, readOnly: true // false if this command should not apply in readOnly mode
}); editor.getSession().on('change', function () { var value = editor.getSession().getValue(); textarea.val(value); if ($("#PreviewPanel").length > 0) { if (textarea.parent().find("#AttachEditorToBrowserPanel").is(":checked")) { $("#PreviewPanel")[0].contentWindow.document.body.innerHTML = value; } } }); }); }

// Destroys every Ace editor created by initAceEditors.
function destroyAceEditors() { $(".AceEditor").each(function () { var editorId = $(this).attr("name") + "-editor"; var editor = ace.edit(editorId); editor.destroy(); }); }

// Removes all TinyMCE editor instances (iterating backwards because
// editor.remove() mutates the tinymce.editors collection).
function destroyTinyMCE() { if (typeof (tinyMCE) !== 'undefined') { var length = tinymce.editors.length; for (var i = length; i > 0; i--) { var editor = tinymce.editors[i - 1]; editor.remove(); }; } }

// On full page load: initialise both editor kinds, and re-initialise them
// (plus jQuery UI bindings) after every AJAX call except the chat poller.
$(window).load(function () {
//console.log("window load");
//setTimeout(function () {
initAceEditors(); initTinyMCE();
//}, 1000);
$(document).ajaxComplete(function (event, xhr, settings) {
//console.log("ajaxComplete");
if (settings.url.indexOf("Chat.asmx") == -1) {
//setTimeout(function () {
initAceEditors(); initTinyMCE(); BindJQueryUIControls();
//}, 1000);
} }); });

// Replaces broken uploader thumbnails with a generic file icon.
function BindMultiFileUploaderImageLoadError() { $(".MultiFileUploader img").error(function () { $(this).attr("src", BaseUrl + "media/images/icons/File.jpg"); }); }

// Activates jQuery UI tabs on .tabs containers.
function BindTabs() { $('.tabs').tabs(); }

// Applies jQuery UI selectmenu to all selects, forwarding its change event to
// the native element; failures are swallowed (controls may not be present).
function BindJQueryUIControls() { try { $("select").selectmenu({ change: function (event, ui) { $(event.target).trigger("change"); } });
//$("[class='RadioButtonList']").each(function () {
// var inputItems = $(this).find("input");
// var index = $("[class='RadioButtonList']").index(this);
// var name = "RadioButtonListItem_" + index;
// inputItems.attr("type", "radio")
// inputItems.attr("name", name);
//});
//$("fieldset input[type=checkbox], fieldset input[type=radio]").checkboxradio();
} catch (ex) { } }

// DOM-ready bootstrap: jQuery UI controls, tooltips (hidden when no title),
// superfish menu, tabs, datetime pickers, global Ctrl-S -> SavePageButton,
// and the save-button chain that also clicks SaveFieldButton after the
// update panel refresh completes.
$(document).ready(function () { BindJQueryUIControls();
//BindScrollMagic();
$('.tooltip').each(function () { var title = $(this).attr("title"); if(title == undefined || title == "") { $(this).hide(); } }); $('.tooltip').tooltipster({ contentAsHTML: true, interactive: true, maxWidth: 500, trigger: 'click' }); $('ul.sf-menu').superfish(); BindTabs(); $('input.datetimepicker').datetimepicker({ controlType: 'select', oneLine: true, dateFormat: 'yy-mm-dd', timeFormat: 'hh:mm:ss TT' }); $(document).on("keydown", function (event) { if (event.ctrlKey || event.metaKey) { switch (String.fromCharCode(event.which).toLowerCase()) { case 's': $('.SavePageButton')[0].click(); event.preventDefault(); break; } } }); $(document).on('click', '.SavePageButton', function (event) { $(".save-template").click(); var text = $(".SaveFieldButton").text(); if (text.indexOf("Save") != -1) { var autoClickedSaveFieldButton = false; OnUpdatePanelRefreshComplete(function (event) { if (!autoClickedSaveFieldButton) { $(".SaveFieldButton")[0].click(); autoClickedSaveFieldButton = true;
//ReloadPreviewPanel();
} }); } return true; }); });

// Configures TinyMCE on .editor textareas: site + admin CSS, plugin/toolbar
// set, permissive HTML validation, auto-save on change and Ctrl-S -> save.
function initTinyMCE() { tinymce.editors = []; tfm_path = BaseUrl + "Scripts/tinyfilemanager.net"; tinymce.init({ selector: ".editor", content_css: BaseUrl + "Views/MasterPages/SiteTemplates/css/main.css, " + BaseUrl + "Admin/Styles/editor.css", menubar: false, plugins: [ 'advlist autolink lists link image charmap print preview hr anchor pagebreak', 'searchreplace wordcount visualblocks visualchars fullscreen', 'insertdatetime media youtube nonbreaking save table contextmenu directionality', 'emoticons template paste textcolor colorpicker textpattern imagetools ace imgmap table map' ], toolbar1: 'file undo redo | styleselect | bold italic underline | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | insert table link image imgmap media youtube ace map', templates: [ ], image_advtab: true, relative_urls: false, convert_urls: false, remove_script_host: false, verify_html: false, valid_children: '+a[div|p|ul|ol|li|h1|span|h2|h3|h4|h5|h5|h6]', extended_valid_elements: 'span[*],a[*],+iframe[src|width|height|name|align|class]', custom_shortcuts: false, setup: function (editor) { editor.on('change', function () { editor.save(); }); editor.on('keydown', function (event) { if (event.ctrlKey || event.metaKey) { switch (String.fromCharCode(event.which).toLowerCase()) { case 's': $('.SavePageButton')[0].click(); event.preventDefault(); break; } } }); } }); }

// Makes a GridView's rows drag-sortable; on reorder, serialises the visible
// rows (header texts as keys) to JSON, POSTs them to WebserviceUrl and then
// refreshes the update panel.
// NOTE(review): `entries` is declared twice ({} then ""), and trsIndex is
// incremented before the trailing-comma check — verify the produced JSON
// against the service before refactoring.
function BindGridViewSortable(CssSelector, WebserviceUrl, UpdatePanelClientId, OnAfterRefreshFunction) { var DragDropGridSortable = $(CssSelector).sortable({ items: 'tr:not(tr:first-child)', cursor: 'crosshair', connectWith: '.DragDropGrid', axis: 'y', dropOnEmpty: true, receive: function (e, ui) { $(this).find("tbody").append(ui.item); }, update: function (event, ui) { var ths = $(this).find("th"); var trs = $(this).find("tr:not(tr:first-child)"); var cols = []; var entries = {}; ths.each(function () { var text = $.trim($(this).text()); if (text != "") cols.push(text); }); var entries = ""; var trsIndex = 0; trs.each(function () { var tds = $(this).find("td"); var propIndex = 0; var properties = ""; tds.each(function () { var col = cols[propIndex]; if (col != undefined) { properties += "\"" + col + "\":\"" + $.trim($(this).text()) + "\""; propIndex++; if (cols[propIndex] != undefined) properties += ","; } }); trsIndex++; var entry = "{" + properties + "}"; if (trs[trsIndex] != undefined) entry += ","; entries += entry; }); var jsonString = "{\"items\":[" + entries + "]}"; if (WebserviceUrl != "") { jQuery.ajax({ type: "POST", url: WebserviceUrl, data: jsonString, contentType: "application/json; charset=utf-8", dataType: "json", success: function (msg) { var isFunc = jQuery.isFunction(OnAfterRefreshFunction); RefreshUpdatePanel(UpdatePanelClientId, function () { BindGridViewSortable(CssSelector, WebserviceUrl, UpdatePanelClientId); if (isFunc) { OnAfterRefreshFunction(); } }); }, error: function (xhr, status, error) { } }); } else { OnAfterRefreshFunction(); } } }); }

// DOM-ready: initialise split panes and make site-tree links navigate.
$(function () { $('div.split-pane').splitPane();
//$("#SiteTree ul").sortable({ connectWith: "ul" });
//BindTree();
$(document).on('click', '#SiteTree a', function () { window.location.href = $(this).attr("href"); }); });

// Refreshes the parent node of the currently selected site-tree node.
function RefreshSiteTreeParentNode() { var selected = $("#SiteTree").jstree("get_selected"); if (selected.length > 0) { var selectedId = selected[0]; var parentId = $("#SiteTree").jstree(true).get_parent(selectedId); RefreshSiteTreeNodeById(parentId); } }

// Refreshes the currently selected site-tree node itself.
function RefreshSiteTreeSelectedNode() { var selected = $("#SiteTree").jstree("get_selected"); if (selected.length > 0) { var selectedId = selected[0]; RefreshSiteTreeNodeById(selectedId); } }

// Refreshes a single jsTree node by id (no-op when the tree is not built).
function RefreshSiteTreeNodeById(nodeId) { var jsTree = $('#SiteTree').jstree(true); if (jsTree != false) jsTree.refresh_node(nodeId); }

// Turns a div into a checkbox jsTree (lazy-loaded from WebService.asmx) and
// writes the checked node ids, as a JSON array, into targetSelector.
// NOTE(review): the changed.jstree callback's parameter names are misleading —
// jsTree passes (event, data), so `action` here is the data object.
function ConvertDivToSiteTree(divSelector, targetSelector, mediaId) { var jsTree = $(divSelector).jstree(true); var filterText = ""; if (jsTree != false) { jsTree.destroy(); } $(divSelector).jstree({ "plugins": ["checkbox"], "checkbox": { "keep_selected_style": false }, 'types': { 'default': { 'icon': 'jstree-icon jstree-file' } }, "core": { // so that create works
"multiple": true, "cascade":"", 'data': { 'url': function (node) { if (filterText == "" || filterText == undefined || filterText == null) return node.id === '#' ? BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/GetRootNodes' : BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/GetChildNodes'; else return BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/SearchForNodes?filterText=' + filterText; }, 'data': function (node) { return { 'id': node.id }; } } } }).on('ready.jstree', function (e, data) { data.instance.uncheck_all(); }).on("changed.jstree", function (node, action, selected, event) { var checkedItems = action.instance.get_checked(); $(targetSelector).val(JSON.stringify(checkedItems)); }) }

// (Re)builds the main #SiteTree jsTree, optionally filtered by filterText:
// lazy-loads nodes from WebService.asmx, wires the context menu to
// HandleContextMenuClick and persists drag/drop moves via HandleNodeDragDrop.
function BindTree(filterText) { var jsTree = $('#SiteTree').jstree(true); if (jsTree != false) { jsTree.destroy(); } $('#SiteTree').jstree({ 'types': { 'default': { 'icon': 'jstree-icon jstree-file' } }, "core": { // so that create works
"check_callback": true, "multiple": false, 'data': { 'url': function (node) { if (filterText == "" || filterText == undefined || filterText == null) return node.id === '#' ? BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/GetRootNodes' : BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/GetChildNodes'; else return BaseUrl + 'Admin/Views/MasterPages/WebService.asmx/SearchForNodes?filterText=' + filterText; }, 'data': function (node) { return { 'id': node.id }; } } }, "plugins": ["contextmenu", "dnd", "types"], "contextmenu": { "items": function (node) { return { "Create": { "label": "Create Child", "action": function (obj) {
//this.create(obj);
HandleContextMenuClick("CreateChild", obj.reference, node); } }, "Delete": { "label": "Mark As Deleted", "action": function (obj) { HandleContextMenuClick("Delete", obj.reference, node); } }, "UnDelete": { "label": "Restore", "action": function (obj) { HandleContextMenuClick("UnDelete", obj.reference, node); } }, "ShowInMenu": { "label": "Show In Menu", "action": function (obj) { HandleContextMenuClick("ShowInMenu", obj.reference, node); } }, "HideFromMenu": { "label": "Hide From Menu", "action": function (obj) { HandleContextMenuClick("HideFromMenu", obj.reference, node); } }, "MoveUp": { "label": "Move Up", "action": function (obj) { HandleContextMenuClick("MoveUp", obj.reference, node); } }, "MoveDown": { "label": "Move Down", "action": function (obj) { HandleContextMenuClick("MoveDown", obj.reference, node); } }, "ClearCache": { "label": "Clear Cache", "action": function (obj) { HandleContextMenuClick("ClearCache", obj.reference, node); } }, "ViewFrontEnd": { "label": "View Front End", "action": function (obj) { HandleContextMenuClick("ViewFrontEnd", obj.reference, node); } }, "Duplicate": { "label": "Duplicate", "action": function (obj) { HandleContextMenuClick("Duplicate", obj.reference, node); } }, "DuplicateIncludingAllChildren": { "label": "Duplicate + Children", "action": function (obj) { HandleContextMenuClick("DuplicateIncludingAllChildren", obj.reference, node); } }, "DeletePermanently": { "label": "Delete Permanently", "action": function (obj) { HandleContextMenuClick("DeletePermanently", obj.reference, node); } } }; } } }).on('move_node.jstree', function (e, data) { var sourceMediaId = data.node.id; var parentNode = $("#" + data.parent); var parentMediaId = parentNode.attr("id"); var newPosition = data.position; jQuery.ajax({ type: "POST", url: BaseWebserverUrl + "/HandleNodeDragDrop", data: "{'sourceMediaId':'" + sourceMediaId + "', 'parentMediaId':'" + parentMediaId + "', 'newPosition':'" + newPosition + "'}", contentType: "application/json; charset=utf-8", dataType: "text", success: function (msg) {
//RefreshSiteTreeViewAjaxPanel();
RefreshSiteTreeNodeById(parentMediaId); }, error: function (xhr, status, error) { DisplayJsonException(xhr); RefreshSiteTreeNodeById(parentMediaId); } }); }); }

// jsTree (vakata) drag/drop hooks: dnd_stop appends a dragged tree node as an
// <li> (with delete/edit links) to the ul.dropZone it was dropped on, skipping
// duplicates, then re-serialises the zone via UpdateValuesFromUL.
$(document) .on('dnd_move.vakata', function (e, data) { var target = $(data.event.target); var dropZone = null; var parentDropZone = target.parents("ul.dropZone"); if (target.hasClass("ul.dropZone")) { dropZone = target; } else if (parentDropZone.length > 0) { dropZone = parentDropZone; } return true; }) .on('dnd_stop.vakata', function (e, data) { var elem = $(data.element); var target = $(data.event.target); var tagName = target.prop("tagName").toLowerCase(); if (tagName != "ul") { target = target.parents("ul"); } var isDropZone = target.hasClass("dropZone") if (isDropZone) { var mediaDetailId = elem.parent().attr("mediadetailid"); var text = elem.text(); var href = elem.attr("href") + "&masterFilePath=~/Admin/Views/MasterPages/Popup.Master"; var li = "<li mediadetailid='" + mediaDetailId + "'><a class='delete'>x</a><span class='text'>" + text + "</span><a class='edit colorbox iframe' href='" + href + "'>Edit</a></li>"; if (target.find("li[mediadetailid='" + mediaDetailId + "']").length == 0) { target.append(li); UpdateValuesFromUL(target); } } });

// ASP.NET ScriptManager hook, called after every partial postback.
function pageLoad(sender, args) { BindJQueryUIControls(); /*RefreshSiteTreeNodeById($("#SiteTree").jstree("get_selected")[0]); BindScrollMagic(); BindDataTable(); BindSortable(); BindTabs(); BindMultiFileUploaderImageLoadError(); if (MasterPage != undefined && MasterPage.indexOf("FieldEditor") == -1) { initTinyMCE(); setTimeout(function () { initAceEditors(); }, 1000) } if (typeof (BindActiveTabs) == 'function') BindActiveTabs();*/ }

// Placeholder: DataTables initialisation is currently disabled.
function BindDataTable() {
//$('.DataTable').DataTable({
// dom: 'Bfrtip',
// buttons: [
// {
// extend: 'csvHtml5',
// title: 'Data export'
// }
// ]
//});
}

// Refreshes an update panel and rebinds the sortables afterwards.
function RefreshAdminUpdatePanel(elem) { RefreshUpdatePanel(elem, function () { BindSortable(); }); }

// DOM-ready: run the page bootstrap and the uploader image fallback.
$(document).ready(function () { init(); BindMultiFileUploaderImageLoadError(); });

// Makes drop zones and multi-file-uploader lists sortable; the uploader
// variant records the new id order in the .ReorderItems hidden field.
function BindSortable() { $(".dropZone.sortable").sortable({ tolerance: "pointer", connectWith: '.dropZone.sortable', update: function (event, ui) { } }); $(".MultiFileUploader .sortable").sortable({ tolerance: "pointer", update: function (event, ui) { var arr = new Array(); $(this).children("li").each(function () { var id = $(this).attr("data-id"); arr.push(id); }); var root = $(this).parents(".MultiFileUploader"); root.find(".ReorderItems").val(JSON.stringify(arr)); } }); }

// Pins the save panels with ScrollMagic (.SavePanel is skipped on IE/Trident).
function BindScrollMagic() { if(window.navigator.appVersion.indexOf("Trident") == -1) ScrollMagicSetup(".SavePanel"); ScrollMagicSetup("#SaveFields"); }

// Cache of { selector, controller, scene } entries so re-binding a selector
// destroys its previous ScrollMagic controller/scene first.
var controllerScenesArray = []; function ScrollMagicSetup(selector) { var controller = null; var scene = null; var myObject = {}; var newEntry = true; $(controllerScenesArray).each(function () { if (this.selector == selector) { myObject = this; myObject.controller.destroy(); myObject.scene.destroy(); newEntry = false; return; } }); myObject.selector = selector; myObject.controller = controller = new ScrollMagic.Controller(); myObject.scene = new ScrollMagic.Scene({ offset: -50, triggerElement: selector, triggerHook: 0 }) .setPin(selector) .addTo(controller); if (newEntry) { controllerScenesArray.push(myObject); } }

// Reloads the preview iframe by reassigning its own src.
function ReloadPreviewPanel() { if ($("#PreviewPanel").length > 0) $("#PreviewPanel")[0].src = $("#PreviewPanel")[0].src; }
// Suppress the Enter key in text inputs (except the search box) so pressing
// Enter inside a form field does not trigger an accidental ASP.NET postback.
$(function () {
    $(':text').bind('keydown', function (e) {
        if (e.target.className != "searchtextbox" && e.keyCode == 13) { // 13 = Enter
            e.preventDefault();
            return false;
        }
        return true;
    });
});

// Rebuilds the <li> items of a drop-zone <ul> from the JSON stored in its
// hidden input. Entries are objects of the form { id, name, adminUrl }
// produced by UpdateValuesFromUL.
// Fix: parse the stored value with JSON.parse instead of eval — eval would
// execute arbitrary script if the hidden field were ever tampered with.
// An empty or malformed value leaves the list untouched (matching the old
// behaviour, where eval("") yielded undefined and $(undefined) was empty).
function UpdateULFromValues(elem) {
    var values = $(elem).find("input[type='hidden']");
    var raw = values.val();
    if (!raw || !$.trim(raw)) {
        return;
    }
    var json;
    try {
        json = JSON.parse(raw);
    } catch (ex) {
        return;
    }
    $(json).each(function () {
        $(elem).append("<li mediadetailid='" + this.id + "'><a class='delete'>x</a><span class='text'>" + this.name + "</span><a class='edit colorbox iframe' href='" + this.adminUrl + "'>Edit</a></li>");
    });
}

// Serialises the visible <li> items of a drop-zone <ul> back into its hidden
// input as a JSON array of { name, id, adminUrl } objects.
function UpdateValuesFromUL(elem) {
    var values = $(elem).find("input[type='hidden']");
    var arr = new Array();
    $(elem).children("li:not(.hidden)").each(function () {
        var mediadetailid = $(this).attr("mediadetailid");
        var adminUrl = $(this).find("a.edit").attr("href").replace(window.location.origin, "");
        if (adminUrl.indexOf("masterFilePath") == -1) {
            // Ensure the edit link opens inside the popup master page.
            adminUrl = adminUrl + "&masterFilePath=~/Admin/Views/MasterPages/Popup.Master";
        }
        var name = $(this).children("span.text").text();
        if (name != "") {
            var obj = new Object();
            obj.name = name;
            obj.id = mediadetailid;
            obj.adminUrl = adminUrl;
            arr.push(obj);
        }
    });
    values.val(JSON.stringify(arr));
}

// Page bootstrap: builds the site tree, wires sortables and the sticky save
// panel, the tree filter box, the drop-zone lists and the multi-file
// uploader handlers. Called from the document-ready handler above.
function init() {
    BindTree();
    BindSortable();
    BindScrollMagic();

    // Re-filter the site tree when Enter is pressed in the filter box.
    $("#Filter").on("keyup", function (e) {
        var code = e.keyCode || e.which;
        if (code == 13) {
            BindTree($(this).val());
        }
    });

    // Populate every drop zone from its hidden JSON field.
    $("ul.dropZone").each(function () {
        UpdateULFromValues(this);
    });

    // Remove an item from a drop zone and re-sync its hidden field.
    $(document).on("click", ".dropZone li a.delete", function () {
        var elem = $(this).closest(".dropZone")[0];
        $(this).closest("li").remove();
        UpdateValuesFromUL(elem);
    });

    // Re-sync the hidden field after a drag-reorder inside a drop zone.
    $("ul.dropZone").sortable({
        update: function (event, ui) {
            UpdateValuesFromUL(this);
        }
    });

    // Toggle an uploader item between deleted/kept, keeping the
    // .ItemsToDelete hidden field (a JSON array of ids) in sync.
    $(document).on("click", ".MultiFileUploader .DeleteItem", function () {
        var root = $(this).closest(".MultiFileUploader");
        var parentItem = $(this).closest(".item");
        var itemsToDelete = root.find(".ItemsToDelete");
        var image = parentItem.find("img");
        if (image.length == 0) {
            // Non-image items have no thumbnail; mark the link itself.
            image = $(this);
        }
        var itemId = $(this).attr('data-id');
        var itemsToDeleteJson = JSON.parse(itemsToDelete.val());
        if (!image.hasClass("MarkedAsDeleted")) {
            image.addClass("MarkedAsDeleted");
            if (itemsToDeleteJson.indexOf(itemId) == -1) {
                itemsToDeleteJson.push(itemId);
            }
            itemsToDelete.val(JSON.stringify(itemsToDeleteJson));
            $(this).text("UnDelete");
        } else {
            image.removeClass("MarkedAsDeleted");
            var index = itemsToDeleteJson.indexOf(itemId);
            if (index != -1) {
                itemsToDeleteJson.splice(index, 1);
            }
            itemsToDelete.val(JSON.stringify(itemsToDeleteJson));
            $(this).text("Delete");
        }
    });

    // Preview newly selected files before upload: images inline, other
    // allowed types as links; reject anything else.
    $(document).on("change", ".MultiFileUpload", function () {
        if (typeof (FileReader) != "undefined") {
            var root = $(this).parents(".MultiFileUploader");
            var dvPreview = root.find(".dvPreview");
            var uploadFilesNowButtons = root.find(".UploadFilesNowButtons");
            dvPreview.html("");
            // Fix: the extension dots were unescaped, so "." matched any
            // character and names such as "report_pdf" slipped through.
            var regex = /^([a-zA-Z0-9\s_\\.\-:])+(\.jpg|\.jpeg|\.gif|\.png|\.bmp|\.pdf|\.csv|\.docx|\.doc)$/;
            $($(this)[0].files).each(function () {
                var file = $(this);
                if (regex.test(file[0].name.toLowerCase())) {
                    var reader = new FileReader();
                    reader.onload = function (e) {
                        if (file[0].type.indexOf("image") != -1) {
                            var img = $("<img />");
                            img.attr("style", "width: 100px; height: 100px;");
                            img.attr("src", e.target.result);
                            dvPreview.append(img);
                        } else {
                            var link = $("<a>" + file[0].name + "</a>");
                            link.attr("href", e.target.result);
                            dvPreview.append(link);
                        }
                    };
                    reader.readAsDataURL(file[0]);
                } else {
                    alert(file[0].name + " is not a valid image file.");
                    dvPreview.html("");
                    return false; // returning false stops the .each() loop
                }
                uploadFilesNowButtons.show();
            });
        } else {
            alert("This browser does not support HTML5 FileReader.");
        }
    });
}
- Updated tinymce settings
WebApplication/Admin/Scripts/adminGlobal.js
- Updated tinymce settings
<ide><path>ebApplication/Admin/Scripts/adminGlobal.js <ide> 'insertdatetime media youtube nonbreaking save table contextmenu directionality', <ide> 'emoticons template paste textcolor colorpicker textpattern imagetools ace imgmap table map' <ide> ], <del> toolbar1: 'file undo redo | styleselect | bold italic underline | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | insert table link image imgmap media youtube ace map', <add> toolbar1: 'undo redo | paste pastetext | bold italic underline strikethrough superscript subscript charmap emoticons | formatselect blockquote | alignleft aligncenter alignright alignjustify outdent indent | bullist numlist | insert table | anchor link image imgmap media youtube map | visualblocks ace', <ide> templates: [ <ide> ], <ide> image_advtab: true,
Java
apache-2.0
2cefcefc053a84e3964ea3156324436dac547113
0
AxonFramework/AxonFramework,krosenvold/AxonFramework
/* * Copyright (c) 2010-2018. Axon Framework * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.axonframework.common.lock; import org.axonframework.common.Assert; import java.util.Collection; import java.util.HashSet; import java.util.Set; import java.util.WeakHashMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantLock; import static java.util.Collections.newSetFromMap; import static java.util.Collections.synchronizedMap; /** * Implementation of a {@link LockFactory} that uses a pessimistic locking strategy. Calls to * {@link #obtainLock} will block until a lock could be obtained or back off limit is reached, based on the * settings provided, by throwing an exception. The latter will cause the command to fail, but will allow * the calling thread to be freed. If a lock is obtained by a thread, that thread has guaranteed unique access. * <p/> * Each thread can hold the same lock multiple times. The lock will only be released for other threads when the lock * has been released as many times as it was obtained. * <p/> * This lock can be used to ensure thread safe access to a number of objects, such as Aggregates and Sagas. 
* * @author Allard Buijze * @author Michael Bischoff * @author Henrique Sena * @since 1.3 */ public class PessimisticLockFactory implements LockFactory { private static final Set<PessimisticLockFactory> INSTANCES = newSetFromMap(synchronizedMap(new WeakHashMap<>())); private final ConcurrentHashMap<String, DisposableLock> locks = new ConcurrentHashMap<>(); private final int acquireAttempts; private final int maximumQueued; private final int lockAttemptTimeout; private static Set<Thread> threadsWaitingForMyLocks(Thread owner) { return threadsWaitingForMyLocks(owner, INSTANCES); } private static Set<Thread> threadsWaitingForMyLocks(Thread owner, Set<PessimisticLockFactory> locksInUse) { Set<Thread> waitingThreads = new HashSet<>(); for (PessimisticLockFactory lock : locksInUse) { lock.locks.values().stream() .filter(disposableLock -> disposableLock.isHeldBy(owner)) .forEach(disposableLock -> disposableLock.queuedThreads().stream() .filter(waitingThreads::add) .forEach(thread -> waitingThreads.addAll(threadsWaitingForMyLocks(thread, locksInUse)))); } return waitingThreads; } /** * Creates a builder to construct an instance of this LockFactory. * * @return a builder allowing the definition of properties for this Lock Factory. */ public static Builder builder() { return new Builder(); } /** * Creates an instance using default values, as defined in the properties of the {@link Builder}. * * @return a PessimisticLockFactory instance using sensible default values */ public static PessimisticLockFactory usingDefaults() { return builder().build(); } /** * Creates an instance of the lock factory using the given {@code builder} containing the configuration properties * to use. 
* * @param builder The building containing the configuration properties to use */ protected PessimisticLockFactory(Builder builder) { this.acquireAttempts = builder.acquireAttempts; this.maximumQueued = builder.maximumQueued; this.lockAttemptTimeout = builder.lockAttemptTimeout; INSTANCES.add(this); } /** * Obtain a lock for a resource identified by the given {@code identifier}. This method will block until a * lock was successfully obtained. * <p/> * Note: when an exception occurs during the locking process, the lock may or may not have been allocated. * * @param identifier the identifier of the lock to obtain. * @return a handle to release the lock. If the thread that releases the lock does not hold the lock * {@link IllegalMonitorStateException} is thrown * {@link IllegalArgumentException} is thrown when identifier is null */ @Override public Lock obtainLock(String identifier) { Assert.nonNull(identifier, () -> "Aggregate identifier may not be null"); boolean lockObtained = false; DisposableLock lock = null; while (!lockObtained) { lock = lockFor(identifier); lockObtained = lock.lock(); if (!lockObtained) { locks.remove(identifier, lock); } } return lock; } private DisposableLock lockFor(String identifier) { DisposableLock lock = locks.get(identifier); while (lock == null) { locks.putIfAbsent(identifier, new DisposableLock(identifier)); lock = locks.get(identifier); } return lock; } private static final class PubliclyOwnedReentrantLock extends ReentrantLock { private static final long serialVersionUID = -2259228494514612163L; @Override public Collection<Thread> getQueuedThreads() { // NOSONAR return super.getQueuedThreads(); } public boolean isHeldBy(Thread thread) { return thread.equals(getOwner()); } } /** * Builder class for the {@link PessimisticLockFactory}. 
*/ public static class Builder { private int acquireAttempts = 100; private int maximumQueued = Integer.MAX_VALUE; private int lockAttemptTimeout = 600; /** * Default constructor */ protected Builder() { } /** * Indicates howmany attempts should be done to acquire a lock. In combination with the * {@link #lockAttemptTimeout(int)}, this defines the total timeout of a lock acquisition. * <p> * Defaults to 100. * * @param acquireAttempts The number of attempts to acquire the lock * @return this Builder, for further configuration */ public Builder acquireAttempts(int acquireAttempts) { Assert.isTrue( acquireAttempts > 0 || acquireAttempts == -1, () -> "acquireAttempts needs to be a positive integer or -1, but was '" + acquireAttempts + "'" ); this.acquireAttempts = acquireAttempts; return this; } /** * Defines the maximum number of queued threads to allow for this lock. If the given number of threads are * waiting to acquire a lock, and another thread joins, that thread will immediately fail any attempt to acquire * the lock, as if it had timed out. * <p> * Defaults to unbounded. * * @param maximumQueued The maximum number of threads to allow in the queue for this lock * @return this Builder, for further configuration */ public Builder queueLengthThreshold(int maximumQueued) { Assert.isTrue( maximumQueued > 0, () -> "queueLengthThreshold needs to be a positive integer, but was '" + maximumQueued + "'" ); this.maximumQueued = maximumQueued; return this; } /** * The duration of a single attempt to acquire the internal lock. In combination with the * {@link #acquireAttempts(int)}, this defines the total timeout of an acquisition attempt. * <p> * Defaults to 600ms. 
* * @param lockAttemptTimeout The duration of a single aqcuisition attempt of the internal lock, in milliseconds * @return this Builder, for further configuration */ public Builder lockAttemptTimeout(int lockAttemptTimeout) { Assert.isTrue( lockAttemptTimeout >= 0, () -> "lockAttemptTimeout needs to be a non negative integer, but was '" + lockAttemptTimeout + "'" ); this.lockAttemptTimeout = lockAttemptTimeout; return this; } /** * Builds the PessimisticLockFactory instance using the properties defined in this builder * * @return a fully configured PessimisticLockFactory instance */ public PessimisticLockFactory build() { return new PessimisticLockFactory(this); } } private class DisposableLock implements Lock { private final String identifier; private final PubliclyOwnedReentrantLock lock; private volatile boolean isClosed = false; private DisposableLock(String identifier) { this.identifier = identifier; this.lock = new PubliclyOwnedReentrantLock(); } @Override public void release() { try { lock.unlock(); } finally { disposeIfUnused(); } } @Override public boolean isHeld() { return lock.isHeldByCurrentThread(); } public boolean lock() { if (lock.getQueueLength() >= maximumQueued) { throw new LockAcquisitionFailedException("Failed to acquire lock for aggregate identifier " + identifier + ": too many queued threads."); } try { if (!lock.tryLock(0, TimeUnit.NANOSECONDS)) { int attempts = acquireAttempts - 1; do { attempts--; checkForDeadlock(); if (attempts < 1) { throw new LockAcquisitionFailedException( "Failed to acquire lock for aggregate identifier(" + identifier + "), maximum attempts exceeded (" + acquireAttempts + ")" ); } } while (!lock.tryLock(lockAttemptTimeout, TimeUnit.MILLISECONDS)); } } catch (InterruptedException e) { throw new LockAcquisitionFailedException("Thread was interrupted", e); } if (isClosed) { lock.unlock(); return false; } return true; } private void checkForDeadlock() { if (!lock.isHeldByCurrentThread() && lock.isLocked()) { for (Thread 
thread : threadsWaitingForMyLocks(Thread.currentThread())) { if (lock.isHeldBy(thread)) { throw new DeadlockException( "An imminent deadlock was detected while attempting to acquire a lock" ); } } } } private void disposeIfUnused() { if (lock.tryLock()) { try { if (lock.getHoldCount() == 1) { // we now have a lock. We can shut it down. isClosed = true; locks.remove(identifier, this); } } finally { lock.unlock(); } } } public Collection<Thread> queuedThreads() { return lock.getQueuedThreads(); } public boolean isHeldBy(Thread owner) { return lock.isHeldBy(owner); } } }
messaging/src/main/java/org/axonframework/common/lock/PessimisticLockFactory.java
/* * Copyright (c) 2010-2018. Axon Framework * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.axonframework.common.lock; import org.axonframework.common.Assert; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantLock; import static java.util.Collections.newSetFromMap; import static java.util.Collections.synchronizedMap; import static org.axonframework.common.Assert.assertThat; /** * Implementation of a {@link LockFactory} that uses a pessimistic locking strategy. Calls to * {@link #obtainLock} will block until a lock could be obtained or back off limit is reached, based on the * settings provided, by throwing an exception. The latter will cause the command to fail, but will allow * the calling thread to be freed. If a lock is obtained by a thread, that thread has guaranteed unique access. * <p/> * Each thread can hold the same lock multiple times. The lock will only be released for other threads when the lock * has been released as many times as it was obtained. * <p/> * This lock can be used to ensure thread safe access to a number of objects, such as Aggregates and Sagas. 
* * @author Allard Buijze * @author Michael Bischoff * @author Henrique Sena * @since 1.3 */ public class PessimisticLockFactory implements LockFactory { private static final Set<PessimisticLockFactory> INSTANCES = newSetFromMap(synchronizedMap(new WeakHashMap<>())); private final ConcurrentHashMap<String, DisposableLock> locks = new ConcurrentHashMap<>(); private final int acquireAttempts; private final int maximumQueued; private final int lockAttemptTimeout; private static Set<Thread> threadsWaitingForMyLocks(Thread owner) { return threadsWaitingForMyLocks(owner, INSTANCES); } private static Set<Thread> threadsWaitingForMyLocks(Thread owner, Set<PessimisticLockFactory> locksInUse) { Set<Thread> waitingThreads = new HashSet<>(); for (PessimisticLockFactory lock : locksInUse) { lock.locks.values().stream() .filter(disposableLock -> disposableLock.isHeldBy(owner)) .forEach(disposableLock -> disposableLock.queuedThreads().stream() .filter(waitingThreads::add) .forEach(thread -> waitingThreads.addAll(threadsWaitingForMyLocks(thread, locksInUse)))); } return waitingThreads; } /** * Creates a builder to construct an instance of this LockFactory. * * @return a builder allowing the definition of properties for this Lock Factory. */ public static Builder builder() { return new Builder(); } /** * Creates an instance using default values, as defined in the properties of the {@link Builder}. * * @return a PessimisticLockFactory instance using sensible default values */ public static PessimisticLockFactory usingDefaults() { return builder().build(); } /** * Creates an instance of the lock factory using the given {@code builder} containing the configuration properties * to use. 
* * @param builder The building containing the configuration properties to use */ protected PessimisticLockFactory(Builder builder) { this.acquireAttempts = builder.acquireAttempts; this.maximumQueued = builder.maximumQueued; this.lockAttemptTimeout = builder.lockAttemptTimeout; INSTANCES.add(this); } /** * Obtain a lock for a resource identified by the given {@code identifier}. This method will block until a * lock was successfully obtained. * <p/> * Note: when an exception occurs during the locking process, the lock may or may not have been allocated. * * @param identifier the identifier of the lock to obtain. * @return a handle to release the lock. If the thread that releases the lock does not hold the lock * {@link IllegalMonitorStateException} is thrown * {@link IllegalArgumentException} is thrown when identifier is null */ @Override public Lock obtainLock(String identifier) { Assert.nonNull(identifier, () -> "Aggregate identifier may not be null"); boolean lockObtained = false; DisposableLock lock = null; while (!lockObtained) { lock = lockFor(identifier); lockObtained = lock.lock(); if (!lockObtained) { locks.remove(identifier, lock); } } return lock; } private DisposableLock lockFor(String identifier) { DisposableLock lock = locks.get(identifier); while (lock == null) { locks.putIfAbsent(identifier, new DisposableLock(identifier)); lock = locks.get(identifier); } return lock; } private static final class PubliclyOwnedReentrantLock extends ReentrantLock { private static final long serialVersionUID = -2259228494514612163L; @Override public Collection<Thread> getQueuedThreads() { // NOSONAR return super.getQueuedThreads(); } public boolean isHeldBy(Thread thread) { return thread.equals(getOwner()); } } /** * Builder class for the {@link PessimisticLockFactory}. 
*/ public static class Builder { private int acquireAttempts = 100; private int maximumQueued = Integer.MAX_VALUE; private int lockAttemptTimeout = 600; /** * Default constructor */ protected Builder() { } /** * Indicates howmany attempts should be done to acquire a lock. In combination with the * {@link #lockAttemptTimeout(int)}, this defines the total timeout of a lock acquisition. * <p> * Defaults to 100. * * @param acquireAttempts The number of attempts to acquire the lock * @return this Builder, for further configuration */ public Builder acquireAttempts(int acquireAttempts) { Assert.isTrue( acquireAttempts > 0 || acquireAttempts == -1, () -> "acquireAttempts needs to be a positive integer or -1, but was '" + acquireAttempts + "'" ); this.acquireAttempts = acquireAttempts; return this; } /** * Defines the maximum number of queued threads to allow for this lock. If the given number of threads are * waiting to acquire a lock, and another thread joins, that thread will immediately fail any attempt to acquire * the lock, as if it had timed out. * <p> * Defaults to unbounded. * * @param maximumQueued The maximum number of threads to allow in the queue for this lock * @return this Builder, for further configuration */ public Builder queueLengthThreshold(int maximumQueued) { Assert.isTrue( maximumQueued > 0, () -> "queueLengthThreshold needs to be a positive integer, but was '" + maximumQueued + "'" ); this.maximumQueued = maximumQueued; return this; } /** * The duration of a single attempt to acquire the internal lock. In combination with the * {@link #acquireAttempts(int)}, this defines the total timeout of an acquisition attempt. * <p> * Defaults to 600ms. 
* * @param lockAttemptTimeout The duration of a single aqcuisition attempt of the internal lock, in milliseconds * @return this Builder, for further configuration */ public Builder lockAttemptTimeout(int lockAttemptTimeout) { Assert.isTrue( lockAttemptTimeout >= 0, () -> "lockAttemptTimeout needs to be a non negative integer, but was '" + lockAttemptTimeout + "'" ); this.lockAttemptTimeout = lockAttemptTimeout; return this; } /** * Builds the PessimisticLockFactory instance using the properties defined in this builder * * @return a fully configured PessimisticLockFactory instance */ public PessimisticLockFactory build() { return new PessimisticLockFactory(this); } } private class DisposableLock implements Lock { private final String identifier; private final PubliclyOwnedReentrantLock lock; private volatile boolean isClosed = false; private DisposableLock(String identifier) { this.identifier = identifier; this.lock = new PubliclyOwnedReentrantLock(); } @Override public void release() { try { lock.unlock(); } finally { disposeIfUnused(); } } @Override public boolean isHeld() { return lock.isHeldByCurrentThread(); } public boolean lock() { if (lock.getQueueLength() >= maximumQueued) { throw new LockAcquisitionFailedException("Failed to acquire lock for aggregate identifier " + identifier + ": too many queued threads."); } try { if (!lock.tryLock(0, TimeUnit.NANOSECONDS)) { int attempts = acquireAttempts - 1; do { attempts--; checkForDeadlock(); if (attempts < 1) { throw new LockAcquisitionFailedException( "Failed to acquire lock for aggregate identifier(" + identifier + "), maximum attempts exceeded (" + acquireAttempts + ")" ); } } while (!lock.tryLock(lockAttemptTimeout, TimeUnit.MILLISECONDS)); } } catch (InterruptedException e) { throw new LockAcquisitionFailedException("Thread was interrupted", e); } if (isClosed) { lock.unlock(); return false; } return true; } private void checkForDeadlock() { if (!lock.isHeldByCurrentThread() && lock.isLocked()) { for (Thread 
thread : threadsWaitingForMyLocks(Thread.currentThread())) { if (lock.isHeldBy(thread)) { throw new DeadlockException( "An imminent deadlock was detected while attempting to acquire a lock" ); } } } } private void disposeIfUnused() { if (lock.tryLock()) { try { if (lock.getHoldCount() == 1) { // we now have a lock. We can shut it down. isClosed = true; locks.remove(identifier, this); } } finally { lock.unlock(); } } } public Collection<Thread> queuedThreads() { return lock.getQueuedThreads(); } public boolean isHeldBy(Thread owner) { return lock.isHeldBy(owner); } } }
organizing imports
messaging/src/main/java/org/axonframework/common/lock/PessimisticLockFactory.java
organizing imports
<ide><path>essaging/src/main/java/org/axonframework/common/lock/PessimisticLockFactory.java <ide> <ide> import org.axonframework.common.Assert; <ide> <del>import java.util.*; <add>import java.util.Collection; <add>import java.util.HashSet; <add>import java.util.Set; <add>import java.util.WeakHashMap; <ide> import java.util.concurrent.ConcurrentHashMap; <ide> import java.util.concurrent.TimeUnit; <ide> import java.util.concurrent.locks.ReentrantLock; <ide> <ide> import static java.util.Collections.newSetFromMap; <ide> import static java.util.Collections.synchronizedMap; <del>import static org.axonframework.common.Assert.assertThat; <ide> <ide> /** <ide> * Implementation of a {@link LockFactory} that uses a pessimistic locking strategy. Calls to
Java
mit
error: pathspec 'src/progII/exercise03/Singleton.java' did not match any file(s) known to git
2af20d44ce6a80ea558667a872b7148b1f71b424
1
Sebb767/Prog,Sebb767/Prog
package exercise03; // Die nachfolgende Klasse soll genau einmal instanziiert werden koennen. // Dazu kann der Trick angewendet werden, den Konstruktor private zu machen // und fuer die Generierung der Instanz eine eigene Methode getInstance // vorzusehen. public class Singleton { // Diese Variable soll spaeter die *eine* Instanz aufnehmen private static Singleton mySingleton = null; // Diese Variable dient dazu, die korrekte Funktion zu ueberpruefen private int i = 3; // ******************************************************************* // Hier fehlen der Konstruktor und die Methode getInstance // private Singleton() {} public static Singleton getInstance() { if(mySingleton == null) mySingleton = new Singleton(); return mySingleton; } // ******************************************************************* // Mit Hilfe der nachfolgenden main-Methode koennen Sie ueberpruefen, // ob Ihre Implementierung funktioniert. // Hinweis: Anfangs kompiliert nachfolgener Code nicht. // Sie muessen zunaechst die fehlenden Methoden implementieren! public static void main(String[] args) { System.out.println("Start der Tests..."); Singleton s1 = Singleton.getInstance(); Singleton s2 = Singleton.getInstance(); if (s1 == null) { System.out.println ("Fehler: getInstance liefert beim ersten Aufruf null zurueck."); return; } if (s2 == null) { System.out.println ("Fehler: getInstance liefert beim zweiten Aufruf null zurueck."); return; } s1.i = 17; if (s2.i == 17) System.out.println ("Ihre Singleton-Implementierung ist korrekt."); else System.out.println ("Ihre Singleton-Implementierung ist fehlerhaft."); System.out.println("Testende"); } }
src/progII/exercise03/Singleton.java
Aufgabe 3
src/progII/exercise03/Singleton.java
Aufgabe 3
<ide><path>rc/progII/exercise03/Singleton.java <add>package exercise03; <add> <add> <add>// Die nachfolgende Klasse soll genau einmal instanziiert werden koennen. <add>// Dazu kann der Trick angewendet werden, den Konstruktor private zu machen <add>// und fuer die Generierung der Instanz eine eigene Methode getInstance <add>// vorzusehen. <add> <add>public class Singleton { <add> <add> // Diese Variable soll spaeter die *eine* Instanz aufnehmen <add> private static Singleton mySingleton = null; <add> <add> // Diese Variable dient dazu, die korrekte Funktion zu ueberpruefen <add> private int i = 3; <add> <add> <add> // ******************************************************************* <add> // Hier fehlen der Konstruktor und die Methode getInstance <add> // <add> <add> private Singleton() {} <add> <add> public static Singleton getInstance() <add> { <add> if(mySingleton == null) <add> mySingleton = new Singleton(); <add> <add> return mySingleton; <add> } <add> <add> <add> // ******************************************************************* <add> <add> <add> // Mit Hilfe der nachfolgenden main-Methode koennen Sie ueberpruefen, <add> // ob Ihre Implementierung funktioniert. <add> <add> // Hinweis: Anfangs kompiliert nachfolgener Code nicht. <add> // Sie muessen zunaechst die fehlenden Methoden implementieren! 
<add> <add> public static void main(String[] args) <add> { <add> <add> System.out.println("Start der Tests..."); <add> <add> Singleton s1 = Singleton.getInstance(); <add> Singleton s2 = Singleton.getInstance(); <add> <add> if (s1 == null) <add> { <add> System.out.println ("Fehler: getInstance liefert beim ersten Aufruf null zurueck."); <add> return; <add> } <add> <add> if (s2 == null) <add> { <add> System.out.println ("Fehler: getInstance liefert beim zweiten Aufruf null zurueck."); <add> return; <add> } <add> <add> s1.i = 17; <add> <add> if (s2.i == 17) <add> System.out.println ("Ihre Singleton-Implementierung ist korrekt."); <add> else <add> System.out.println ("Ihre Singleton-Implementierung ist fehlerhaft."); <add> <add> System.out.println("Testende"); <add> } <add>}
Java
apache-2.0
aa751bfba7f1305c8fdf8586f474d51990f81410
0
YoungDigitalPlanet/empiria.player,YoungDigitalPlanet/empiria.player,YoungDigitalPlanet/empiria.player
package eu.ydp.empiria.player.client.module.connection.presenter; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; import eu.ydp.empiria.player.client.module.MarkAnswersMode; import eu.ydp.empiria.player.client.module.MarkAnswersType; import eu.ydp.empiria.player.client.module.ModuleSocket; import eu.ydp.empiria.player.client.module.ShowAnswersType; import eu.ydp.empiria.player.client.module.components.multiplepair.MultiplePairModuleConnectType; import eu.ydp.empiria.player.client.module.components.multiplepair.MultiplePairModuleView; import eu.ydp.empiria.player.client.module.connection.ConnectionModuleModel; import eu.ydp.empiria.player.client.module.connection.structure.MatchInteractionBean; import eu.ydp.empiria.player.client.module.connection.structure.SimpleAssociableChoiceBean; import eu.ydp.empiria.player.client.util.events.multiplepair.PairConnectEvent; import eu.ydp.empiria.player.client.util.events.multiplepair.PairConnectEventHandler; import eu.ydp.gwtutil.client.collections.KeyValue; import eu.ydp.gwtutil.client.debug.gwtlogger.ILogger; import eu.ydp.gwtutil.client.debug.gwtlogger.Logger; import java.util.List; import static eu.ydp.empiria.player.client.module.components.multiplepair.MultiplePairModuleConnectType.*; public class ConnectionModulePresenterImpl implements ConnectionModulePresenter, PairConnectEventHandler { ILogger log = new Logger(); private MatchInteractionBean bean; ConnectionModuleModel model; private boolean isAnswerTypeEqualsCorrect; @Inject private MultiplePairModuleView moduleView; private ModuleSocket moduleSocket; @Override public void setModuleSocket(ModuleSocket moduleSocket) { this.moduleSocket = moduleSocket; } @Override public void bindView() { moduleView.addPairConnectEventHandler(this); moduleView.setBean(bean); moduleView.setModuleSocket(moduleSocket); moduleView.bindView(); moduleView.reset(); } @Override public void reset() { moduleView.reset(); } @Override public void 
setModuleView(MultiplePairModuleView<SimpleAssociableChoiceBean> moduleView) { this.moduleView = moduleView; } @Override public void setModel(ConnectionModuleModel model) { this.model = model; } @Override public void setBean(MatchInteractionBean bean) { this.bean = bean; } @Override public void setLocked(boolean locked) { moduleView.setLocked(locked); } @Override public void showAnswers(ShowAnswersType mode) { List<KeyValue<String, String>> answers = (mode == ShowAnswersType.CORRECT) ? model.getCorrectAnswers() : model.getCurrentAnswers(); showAnswers(answers, (mode == ShowAnswersType.CORRECT) ? NONE : NORMAL); if (mode == ShowAnswersType.CORRECT) { isAnswerTypeEqualsCorrect = true; } else { isAnswerTypeEqualsCorrect = false; } } @Override public void markAnswers(MarkAnswersType type, MarkAnswersMode mode) { setAnswersMarked(mode == MarkAnswersMode.MARK, (type == MarkAnswersType.CORRECT) ? CORRECT : WRONG); } @Override public Widget asWidget() { return moduleView.asWidget(); } @Override public void onConnectionEvent(PairConnectEvent event) { ConnectionDirectedPairDTO pair = getDirectedPair(event); switch (event.getType()) { case CONNECTED: addAnswerToResponseIfConnectionValidOnUserAction(event, pair); break; case DISCONNECTED: disconnectOnUserAction(event, pair); break; case REPAINT_VIEW: repaintViewFromResponseModel(); break; case WRONG_CONNECTION: default: handleWrongConnection(); break; } } private void repaintViewFromResponseModel() { if (moduleView.isAttached()) { reset(); showAnswers(isAnswerTypeEqualsCorrect ? 
ShowAnswersType.CORRECT : ShowAnswersType.USER); } } private void disconnectOnUserAction(PairConnectEvent event, ConnectionDirectedPairDTO pair) { if (event.isUserAction() && pair.getSource() != null && pair.getTarget() != null) { model.removeAnswer(pair.toString()); } } private void addAnswerToResponseIfConnectionValidOnUserAction(PairConnectEvent event, ConnectionDirectedPairDTO pair) { if (event.isUserAction()) { addAnswerToResponseIfConnectionValid(event, pair); } } private void addAnswerToResponseIfConnectionValid(PairConnectEvent event, ConnectionDirectedPairDTO pair) { boolean isResponseExists = model.checkUserResonseContainsAnswer(pair.toString()); if (isConnectionValid(pair) && !isResponseExists) { model.addAnswer(pair.toString()); } else { moduleView.disconnect(event.getSourceItem(), event.getTargetItem()); } } private void handleWrongConnection() { log.warning("ConnectionModulePresenter: wrong connection"); } private ConnectionDirectedPairDTO getDirectedPair(PairConnectEvent event) { String start = event.getSourceItem(); String end = event.getTargetItem(); return createDirectedPair(start, end); } private ConnectionDirectedPairDTO createDirectedPair(String start, String end) { ConnectionDirectedPairDTO pair = new ConnectionDirectedPairDTO(); List<String> sourceChoicesIdentifiersSet = bean.getSourceChoicesIdentifiersSet(); List<String> targetChoicesIdentifiersSet = bean.getTargetChoicesIdentifiersSet(); setDirectedPairNodeByIdentyfier(pair, start, sourceChoicesIdentifiersSet, targetChoicesIdentifiersSet); setDirectedPairNodeByIdentyfier(pair, end, sourceChoicesIdentifiersSet, targetChoicesIdentifiersSet); return pair; } private void setDirectedPairNodeByIdentyfier(ConnectionDirectedPairDTO pair, String choosenIdentifier, List<String> sourceChoicesIdentifiersSet, List<String> targetChoicesIdentifiersSet) { if (sourceChoicesIdentifiersSet.contains(choosenIdentifier)) { pair.setSource(choosenIdentifier); } else if 
(targetChoicesIdentifiersSet.contains(choosenIdentifier)) { pair.setTarget(choosenIdentifier); } } private boolean isConnectionValid(ConnectionDirectedPairDTO pair) { int errorsCount = 0; if (isPairValid(pair)) { errorsCount++; } else if (isMaxAssociationAchieved()) { errorsCount++; } else if (isSourceMatchMaxAchieved(pair)) { errorsCount++; } else if (isTargetMatchMaxAchieved(pair)) { errorsCount++; } return errorsCount == 0; } private boolean isPairValid(ConnectionDirectedPairDTO pair) { return pair.getSource() == null || pair.getTarget() == null; } /** * Individually, each choice has a matchMax attribute that controls how many * pairings it can be part of. */ private boolean matchMaxCondition(String identifier) { SimpleAssociableChoiceBean beanChoiceIdentifier = bean.getChoiceByIdentifier(identifier); int matchMax = beanChoiceIdentifier.getMatchMax(); int currentChoicePairingsNumber = model.getCurrentChoicePairingsNumber(identifier); return matchMax > 0 && currentChoicePairingsNumber >= matchMax; } private boolean isSourceMatchMaxAchieved(ConnectionDirectedPairDTO pair) { return matchMaxCondition(pair.getSource()); } private boolean isTargetMatchMaxAchieved(ConnectionDirectedPairDTO pair) { return matchMaxCondition(pair.getTarget()); } /** * The maxAssociations attribute controls the maximum number of pairings the * user is allowed to make overall. 
*/ private boolean isMaxAssociationAchieved() { int maxAssociations = bean.getMaxAssociations(); int currentOverallPairingsNumber = model.getCurrentOverallPairingsNumber(); return maxAssociations > 0 && currentOverallPairingsNumber >= maxAssociations; } /** * Sets connections in view using given {@link KeyValue} collection for * defined {@link MultiplePairModuleConnectType} * * @param answers * @param type */ private void showAnswers(List<KeyValue<String, String>> answers, MultiplePairModuleConnectType type) { moduleView.reset(); for (KeyValue<String, String> answer : answers) { moduleView.connect(answer.getKey(), answer.getValue(), type); } } /** * Marks / unmarks answers * * @param markMode - {@link Boolean} mark/unmark * @param markingType - {@link MultiplePairModuleConnectType#CORRECT} or * {@link MultiplePairModuleConnectType#WRONG} */ private void setAnswersMarked(boolean markMode, MultiplePairModuleConnectType markingType) { List<Boolean> responseEvaluated = model.evaluateResponse(); List<KeyValue<String, String>> currentAnswers = model.getCurrentAnswers(); int responseCnt = 0; for (Boolean isCorrect : responseEvaluated) { MultiplePairModuleConnectType type = (isCorrect) ? MultiplePairModuleConnectType.CORRECT : MultiplePairModuleConnectType.WRONG; KeyValue<String, String> answersPair = currentAnswers.get(responseCnt); if (markingType.equals(type)) { connectOrDisconnectByMarkMode(markMode, type, answersPair); } responseCnt++; } } private void connectOrDisconnectByMarkMode(boolean markMode, MultiplePairModuleConnectType type, KeyValue<String, String> answersPair) { moduleView.disconnect(answersPair.getKey(), answersPair.getValue()); if (markMode) { moduleView.connect(answersPair.getKey(), answersPair.getValue(), type); // TODO: jesli dana pozycja nie jest zaznaczona wcale to // wyslac MultiplePairModuleConnectType.NONE ?? } else { moduleView.connect(answersPair.getKey(), answersPair.getValue(), MultiplePairModuleConnectType.NORMAL); } } }
src/main/java/eu/ydp/empiria/player/client/module/connection/presenter/ConnectionModulePresenterImpl.java
package eu.ydp.empiria.player.client.module.connection.presenter; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; import eu.ydp.empiria.player.client.module.MarkAnswersMode; import eu.ydp.empiria.player.client.module.MarkAnswersType; import eu.ydp.empiria.player.client.module.ModuleSocket; import eu.ydp.empiria.player.client.module.ShowAnswersType; import eu.ydp.empiria.player.client.module.components.multiplepair.MultiplePairModuleConnectType; import eu.ydp.empiria.player.client.module.components.multiplepair.MultiplePairModuleView; import eu.ydp.empiria.player.client.module.connection.ConnectionModuleModel; import eu.ydp.empiria.player.client.module.connection.structure.MatchInteractionBean; import eu.ydp.empiria.player.client.module.connection.structure.SimpleAssociableChoiceBean; import eu.ydp.empiria.player.client.util.events.multiplepair.PairConnectEvent; import eu.ydp.empiria.player.client.util.events.multiplepair.PairConnectEventHandler; import eu.ydp.gwtutil.client.collections.KeyValue; import eu.ydp.gwtutil.client.debug.gwtlogger.ILogger; import eu.ydp.gwtutil.client.debug.gwtlogger.Logger; import java.util.List; import static eu.ydp.empiria.player.client.module.components.multiplepair.MultiplePairModuleConnectType.*; public class ConnectionModulePresenterImpl implements ConnectionModulePresenter, PairConnectEventHandler { ILogger log = new Logger(); private MatchInteractionBean bean; ConnectionModuleModel model; @Inject private MultiplePairModuleView moduleView; private ModuleSocket moduleSocket; @Override public void setModuleSocket(ModuleSocket moduleSocket) { this.moduleSocket = moduleSocket; } @Override public void bindView() { moduleView.addPairConnectEventHandler(this); moduleView.setBean(bean); moduleView.setModuleSocket(moduleSocket); moduleView.bindView(); moduleView.reset(); } @Override public void reset() { moduleView.reset(); } @Override public void setModuleView(MultiplePairModuleView<SimpleAssociableChoiceBean> 
moduleView) { this.moduleView = moduleView; } @Override public void setModel(ConnectionModuleModel model) { this.model = model; } @Override public void setBean(MatchInteractionBean bean) { this.bean = bean; } @Override public void setLocked(boolean locked) { moduleView.setLocked(locked); } @Override public void showAnswers(ShowAnswersType mode) { List<KeyValue<String, String>> answers = (mode == ShowAnswersType.CORRECT) ? model.getCorrectAnswers() : model.getCurrentAnswers(); showAnswers(answers, (mode == ShowAnswersType.CORRECT) ? NONE : NORMAL); } @Override public void markAnswers(MarkAnswersType type, MarkAnswersMode mode) { setAnswersMarked(mode == MarkAnswersMode.MARK, (type == MarkAnswersType.CORRECT) ? CORRECT : WRONG); } @Override public Widget asWidget() { return moduleView.asWidget(); } @Override public void onConnectionEvent(PairConnectEvent event) { ConnectionDirectedPairDTO pair = getDirectedPair(event); switch (event.getType()) { case CONNECTED: addAnswerToResponseIfConnectionValidOnUserAction(event, pair); break; case DISCONNECTED: disconnectOnUserAction(event, pair); break; case REPAINT_VIEW: repaintViewFromResponseModel(); break; case WRONG_CONNECTION: default: handleWrongConnection(); break; } } private void repaintViewFromResponseModel() { if (moduleView.isAttached()) { reset(); showAnswers(ShowAnswersType.USER); } } private void disconnectOnUserAction(PairConnectEvent event, ConnectionDirectedPairDTO pair) { if (event.isUserAction() && pair.getSource() != null && pair.getTarget() != null) { model.removeAnswer(pair.toString()); } } private void addAnswerToResponseIfConnectionValidOnUserAction(PairConnectEvent event, ConnectionDirectedPairDTO pair) { if (event.isUserAction()) { addAnswerToResponseIfConnectionValid(event, pair); } } private void addAnswerToResponseIfConnectionValid(PairConnectEvent event, ConnectionDirectedPairDTO pair) { boolean isResponseExists = model.checkUserResonseContainsAnswer(pair.toString()); if (isConnectionValid(pair) 
&& !isResponseExists) { model.addAnswer(pair.toString()); } else { moduleView.disconnect(event.getSourceItem(), event.getTargetItem()); } } private void handleWrongConnection() { log.warning("ConnectionModulePresenter: wrong connection"); } private ConnectionDirectedPairDTO getDirectedPair(PairConnectEvent event) { String start = event.getSourceItem(); String end = event.getTargetItem(); return createDirectedPair(start, end); } private ConnectionDirectedPairDTO createDirectedPair(String start, String end) { ConnectionDirectedPairDTO pair = new ConnectionDirectedPairDTO(); List<String> sourceChoicesIdentifiersSet = bean.getSourceChoicesIdentifiersSet(); List<String> targetChoicesIdentifiersSet = bean.getTargetChoicesIdentifiersSet(); setDirectedPairNodeByIdentyfier(pair, start, sourceChoicesIdentifiersSet, targetChoicesIdentifiersSet); setDirectedPairNodeByIdentyfier(pair, end, sourceChoicesIdentifiersSet, targetChoicesIdentifiersSet); return pair; } private void setDirectedPairNodeByIdentyfier(ConnectionDirectedPairDTO pair, String choosenIdentifier, List<String> sourceChoicesIdentifiersSet, List<String> targetChoicesIdentifiersSet) { if (sourceChoicesIdentifiersSet.contains(choosenIdentifier)) { pair.setSource(choosenIdentifier); } else if (targetChoicesIdentifiersSet.contains(choosenIdentifier)) { pair.setTarget(choosenIdentifier); } } private boolean isConnectionValid(ConnectionDirectedPairDTO pair) { int errorsCount = 0; if (isPairValid(pair)) { errorsCount++; } else if (isMaxAssociationAchieved()) { errorsCount++; } else if (isSourceMatchMaxAchieved(pair)) { errorsCount++; } else if (isTargetMatchMaxAchieved(pair)) { errorsCount++; } return errorsCount == 0; } private boolean isPairValid(ConnectionDirectedPairDTO pair) { return pair.getSource() == null || pair.getTarget() == null; } /** * Individually, each choice has a matchMax attribute that controls how many * pairings it can be part of. 
*/ private boolean matchMaxCondition(String identifier) { SimpleAssociableChoiceBean beanChoiceIdentifier = bean.getChoiceByIdentifier(identifier); int matchMax = beanChoiceIdentifier.getMatchMax(); int currentChoicePairingsNumber = model.getCurrentChoicePairingsNumber(identifier); return matchMax > 0 && currentChoicePairingsNumber >= matchMax; } private boolean isSourceMatchMaxAchieved(ConnectionDirectedPairDTO pair) { return matchMaxCondition(pair.getSource()); } private boolean isTargetMatchMaxAchieved(ConnectionDirectedPairDTO pair) { return matchMaxCondition(pair.getTarget()); } /** * The maxAssociations attribute controls the maximum number of pairings the * user is allowed to make overall. */ private boolean isMaxAssociationAchieved() { int maxAssociations = bean.getMaxAssociations(); int currentOverallPairingsNumber = model.getCurrentOverallPairingsNumber(); return maxAssociations > 0 && currentOverallPairingsNumber >= maxAssociations; } /** * Sets connections in view using given {@link KeyValue} collection for * defined {@link MultiplePairModuleConnectType} * * @param answers * @param type */ private void showAnswers(List<KeyValue<String, String>> answers, MultiplePairModuleConnectType type) { moduleView.reset(); for (KeyValue<String, String> answer : answers) { moduleView.connect(answer.getKey(), answer.getValue(), type); } } /** * Marks / unmarks answers * * @param markMode - {@link Boolean} mark/unmark * @param markingType - {@link MultiplePairModuleConnectType#CORRECT} or * {@link MultiplePairModuleConnectType#WRONG} */ private void setAnswersMarked(boolean markMode, MultiplePairModuleConnectType markingType) { List<Boolean> responseEvaluated = model.evaluateResponse(); List<KeyValue<String, String>> currentAnswers = model.getCurrentAnswers(); int responseCnt = 0; for (Boolean isCorrect : responseEvaluated) { MultiplePairModuleConnectType type = (isCorrect) ? 
MultiplePairModuleConnectType.CORRECT : MultiplePairModuleConnectType.WRONG; KeyValue<String, String> answersPair = currentAnswers.get(responseCnt); if (markingType.equals(type)) { connectOrDisconnectByMarkMode(markMode, type, answersPair); } responseCnt++; } } private void connectOrDisconnectByMarkMode(boolean markMode, MultiplePairModuleConnectType type, KeyValue<String, String> answersPair) { moduleView.disconnect(answersPair.getKey(), answersPair.getValue()); if (markMode) { moduleView.connect(answersPair.getKey(), answersPair.getValue(), type); // TODO: jesli dana pozycja nie jest zaznaczona wcale to // wyslac MultiplePairModuleConnectType.NONE ?? } else { moduleView.connect(answersPair.getKey(), answersPair.getValue(), MultiplePairModuleConnectType.NORMAL); } } }
[YPUB-6663] Added state of answer type.
src/main/java/eu/ydp/empiria/player/client/module/connection/presenter/ConnectionModulePresenterImpl.java
[YPUB-6663] Added state of answer type.
<ide><path>rc/main/java/eu/ydp/empiria/player/client/module/connection/presenter/ConnectionModulePresenterImpl.java <ide> <ide> ConnectionModuleModel model; <ide> <add> private boolean isAnswerTypeEqualsCorrect; <add> <ide> @Inject <ide> private MultiplePairModuleView moduleView; <ide> <ide> public void showAnswers(ShowAnswersType mode) { <ide> List<KeyValue<String, String>> answers = (mode == ShowAnswersType.CORRECT) ? model.getCorrectAnswers() : model.getCurrentAnswers(); <ide> showAnswers(answers, (mode == ShowAnswersType.CORRECT) ? NONE : NORMAL); <add> if (mode == ShowAnswersType.CORRECT) { <add> isAnswerTypeEqualsCorrect = true; <add> } <add> else { <add> isAnswerTypeEqualsCorrect = false; <add> } <ide> } <ide> <ide> @Override <ide> } <ide> } <ide> <add> <ide> private void repaintViewFromResponseModel() { <ide> if (moduleView.isAttached()) { <ide> reset(); <del> showAnswers(ShowAnswersType.USER); <add> showAnswers(isAnswerTypeEqualsCorrect ? ShowAnswersType.CORRECT : ShowAnswersType.USER); <ide> } <ide> } <ide>
Java
bsd-3-clause
32d67a1ca690b0c3cc95808384cf0cdb14d186c0
0
jenkinsci/jaxen
/* * $Header$ * $Revision: 640 $ * $Date: 2005-04-16 08:32:53 -0700 (Sat, 16 Apr 2005) $ * * ==================================================================== * * Copyright (C) 2000-2002 bob mcwhirter & James Strachan. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions, and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions, and the disclaimer that follows * these conditions in the documentation and/or other materials * provided with the distribution. * * 3. The name "Jaxen" must not be used to endorse or promote products * derived from this software without prior written permission. For * written permission, please contact [email protected]. * * 4. Products derived from this software may not be called "Jaxen", nor * may "Jaxen" appear in their name, without prior written permission * from the Jaxen Project Management ([email protected]). * * In addition, we request (but do not require) that you include in the * end-user documentation provided with the redistribution and/or in the * software itself an acknowledgement equivalent to the following: * "This product includes software developed by the * Jaxen Project (http://www.jaxen.org/)." * Alternatively, the acknowledgment may be graphical using the logos * available at http://www.jaxen.org/ * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE Jaxen AUTHORS OR THE PROJECT * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * ==================================================================== * This software consists of voluntary contributions made by many * individuals on behalf of the Jaxen Project and was originally * created by bob mcwhirter <[email protected]> and * James Strachan <[email protected]>. For more information on the * Jaxen Project, please see <http://www.jaxen.org/>. * * $Id: NamespaceContext.java 640 2005-04-16 15:32:53Z elharo $ */ package org.jaxen; /** Resolves namespace prefixes to namespace URIs. * * <p> * The prefixes used within an XPath expression are * independent of those used within any target document. * When evaluating an XPath against a document, only * the resolved namespace URIs are compared, not their * prefixes. * </p> * * <p> * A <code>NamespaceContext</code> is responsible for * translating prefixes as they appear in XPath expressions * into URIs for comparison. A document's prefixes are * resolved internal to the document based upon its own * namespace nodes. * </p> * * @see BaseXPath * @see Navigator#getElementNamespaceUri * @see Navigator#getAttributeNamespaceUri * * @author <a href="mailto:[email protected]">bob mcwhirter</a> */ public interface NamespaceContext { /** Translate the provided namespace prefix into * the matching bound namespace URI. * * <p> * In XPath, there is no such thing as a 'default namespace'. * The empty prefix <strong>always</strong> resolves to the empty * namespace URI. 
Similarly, the prefix "xml" always resolves to * the URI "http://www.w3.org/XML/1998/namespace". * </p> * * @param prefix the namespace prefix to resolve * * @return the namespace URI bound to the prefix; or null if there * is no such namespace */ String translateNamespacePrefixToUri(String prefix); }
src/java/main/org/jaxen/NamespaceContext.java
/* * $Header$ * $Revision: 460 $ * $Date: 2005-02-10 09:35:01 -0800 (Thu, 10 Feb 2005) $ * * ==================================================================== * * Copyright (C) 2000-2002 bob mcwhirter & James Strachan. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions, and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions, and the disclaimer that follows * these conditions in the documentation and/or other materials * provided with the distribution. * * 3. The name "Jaxen" must not be used to endorse or promote products * derived from this software without prior written permission. For * written permission, please contact [email protected]. * * 4. Products derived from this software may not be called "Jaxen", nor * may "Jaxen" appear in their name, without prior written permission * from the Jaxen Project Management ([email protected]). * * In addition, we request (but do not require) that you include in the * end-user documentation provided with the redistribution and/or in the * software itself an acknowledgement equivalent to the following: * "This product includes software developed by the * Jaxen Project (http://www.jaxen.org/)." * Alternatively, the acknowledgment may be graphical using the logos * available at http://www.jaxen.org/ * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE Jaxen AUTHORS OR THE PROJECT * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * ==================================================================== * This software consists of voluntary contributions made by many * individuals on behalf of the Jaxen Project and was originally * created by bob mcwhirter <[email protected]> and * James Strachan <[email protected]>. For more information on the * Jaxen Project, please see <http://www.jaxen.org/>. * * $Id: NamespaceContext.java 460 2005-02-10 17:35:01Z elharo $ */ package org.jaxen; /** Resolves namespace prefixes to namespace URIs. * * <p> * The prefixes used within an XPath expression are * independant of those used within any target document. * When evaluating an XPath against a document, only * the resolved namespace URIs are compared, not their * prefixes. * </p> * * <p> * A <code>NamespaceContext</code> is responsible for * translating prefixes as they appear in XPath expressions * into URIs for comparison. A document's prefixes are * resolved internal to the document based upon its own * namespace nodes. * </p> * * @see BaseXPath * @see Navigator#getElementNamespaceUri * @see Navigator#getAttributeNamespaceUri * * @author <a href="mailto:[email protected]">bob mcwhirter</a> */ public interface NamespaceContext { /** Translate the provided namespace prefix into * the matching bound namespace URI. * * <p> * In XPath, there is no such thing as a 'default namespace'. * The empty prefix <strong>always</strong> resolves to the empty * namespace URI. 
* </p> * * @param prefix the namespace prefix to resolve * * @return the namespace URI matching the prefix */ String translateNamespacePrefixToUri(String prefix); }
Specifying that unbound prefixes return null for their namespace URIs
src/java/main/org/jaxen/NamespaceContext.java
Specifying that unbound prefixes return null for their namespace URIs
<ide><path>rc/java/main/org/jaxen/NamespaceContext.java <ide> /* <ide> * $Header$ <del> * $Revision: 460 $ <del> * $Date: 2005-02-10 09:35:01 -0800 (Thu, 10 Feb 2005) $ <add> * $Revision: 640 $ <add> * $Date: 2005-04-16 08:32:53 -0700 (Sat, 16 Apr 2005) $ <ide> * <ide> * ==================================================================== <ide> * <ide> * James Strachan <[email protected]>. For more information on the <ide> * Jaxen Project, please see <http://www.jaxen.org/>. <ide> * <del> * $Id: NamespaceContext.java 460 2005-02-10 17:35:01Z elharo $ <add> * $Id: NamespaceContext.java 640 2005-04-16 15:32:53Z elharo $ <ide> */ <ide> <ide> <ide> * <ide> * <p> <ide> * The prefixes used within an XPath expression are <del> * independant of those used within any target document. <add> * independent of those used within any target document. <ide> * When evaluating an XPath against a document, only <ide> * the resolved namespace URIs are compared, not their <ide> * prefixes. <ide> * <p> <ide> * In XPath, there is no such thing as a 'default namespace'. <ide> * The empty prefix <strong>always</strong> resolves to the empty <del> * namespace URI. <add> * namespace URI. Similarly, the prefix "xml" always resolves to <add> * the URI "http://www.w3.org/XML/1998/namespace". <ide> * </p> <ide> * <ide> * @param prefix the namespace prefix to resolve <ide> * <del> * @return the namespace URI matching the prefix <add> * @return the namespace URI bound to the prefix; or null if there <add> * is no such namespace <ide> */ <ide> String translateNamespacePrefixToUri(String prefix); <ide> }
Java
mit
9fad1999772fab7e1fc9470e662df28bf9f420e8
0
RoboEagles4828/LandrovalToast,RoboEagles4828/LandrovalToast
package frc.team4828.landrovaltoast; import org.junit.Test; import static org.junit.Assert.assertEquals; public class Tester { @Test public void test() { System.out.println("You're currently testing Tester!"); TestClass test = new TestClass(); int returned = test.testTester(3); System.out.println("Number: " + returned); assertEquals(6, returned); } }
src/main/test/frc/team4828/landrovaltoast/Tester.java
package frc.team4828.landrovaltoast; import org.junit.Test; import static org.junit.Assert.assertEquals; ; public class Tester { @Test public void test() { System.out.println("You're currently testing Tester!"); TestClass test = new TestClass(); int returned = test.testTester(3); System.out.println("Number: " + returned); assertEquals(6, returned); } }
Fixed a Typo
src/main/test/frc/team4828/landrovaltoast/Tester.java
Fixed a Typo
<ide><path>rc/main/test/frc/team4828/landrovaltoast/Tester.java <ide> import org.junit.Test; <ide> <ide> import static org.junit.Assert.assertEquals; <del> <del>; <ide> <ide> public class Tester { <ide> @Test
JavaScript
apache-2.0
250162d37b19604099d5ebc77fda6cba57010dc8
0
dc-js/dc.js,dc-js/dc.js,dc-js/dc.js,dc-js/dc.js,dc-js/dc.js
import {pluck, utils} from '../core/utils'; /** * Legend is a attachable widget that can be added to other dc charts to render horizontal legend * labels. * * Examples: * - {@link http://dc-js.github.com/dc.js/ Nasdaq 100 Index} * - {@link http://dc-js.github.com/dc.js/crime/index.html Canadian City Crime Stats} * @class Legend * @memberof dc * @example * chart.legend(dc.legend().x(400).y(10).itemHeight(13).gap(5)) * @returns {Legend} */ export class Legend { constructor () { const LABEL_GAP = 2; let _parent; let _x = 0; let _y = 0; let _itemHeight = 12; let _gap = 5; let _horizontal = false; let _legendWidth = 560; let _itemWidth = 70; let _autoItemWidth = false; let _legendText = pluck('name'); let _maxItems; let _g; this.parent = function (p) { if (!arguments.length) { return _parent; } _parent = p; return this; }; this.render = () => { _parent.svg().select('g.dc-legend').remove(); _g = _parent.svg().append('g') .attr('class', 'dc-legend') .attr('transform', 'translate(' + _x + ',' + _y + ')'); let legendables = _parent.legendables(); if (_maxItems !== undefined) { legendables = legendables.slice(0, _maxItems); } const itemEnter = _g.selectAll('g.dc-legend-item') .data(legendables) .enter() .append('g') .attr('class', 'dc-legend-item') .on('mouseover', d => { _parent.legendHighlight(d); }) .on('mouseout', d => { _parent.legendReset(d); }) .on('click', d => { d.chart.legendToggle(d); }); _g.selectAll('g.dc-legend-item') .classed('fadeout', d => d.chart.isLegendableHidden(d)); if (legendables.some(pluck('dashstyle'))) { itemEnter .append('line') .attr('x1', 0) .attr('y1', _itemHeight / 2) .attr('x2', _itemHeight) .attr('y2', _itemHeight / 2) .attr('stroke-width', 2) .attr('stroke-dasharray', pluck('dashstyle')) .attr('stroke', pluck('color')); } else { itemEnter .append('rect') .attr('width', _itemHeight) .attr('height', _itemHeight) .attr('fill', d => d ? 
d.color : 'blue'); } itemEnter.append('text') .text(_legendText) .attr('x', _itemHeight + LABEL_GAP) .attr('y', function () { return _itemHeight / 2 + (this.clientHeight ? this.clientHeight : 13) / 2 - 2; }); let cumulativeLegendTextWidth = 0; let row = 0; itemEnter.attr('transform', function (d, i) { if (_horizontal) { const itemWidth = _autoItemWidth === true ? this.getBBox().width + _gap : _itemWidth; if ((cumulativeLegendTextWidth + itemWidth) > _legendWidth && cumulativeLegendTextWidth > 0) { ++row; cumulativeLegendTextWidth = 0; } const translateBy = 'translate(' + cumulativeLegendTextWidth + ',' + row * legendItemHeight() + ')'; cumulativeLegendTextWidth += itemWidth; return translateBy; } else { return 'translate(0,' + i * legendItemHeight() + ')'; } }); }; function legendItemHeight () { return _gap + _itemHeight; } /** * Set or get x coordinate for legend widget. * @method x * @memberof dc.legend * @instance * @param {Number} [x=0] * @returns {Number|dc.legend} */ this.x = function (x) { if (!arguments.length) { return _x; } _x = x; return this; }; /** * Set or get y coordinate for legend widget. * @method y * @memberof dc.legend * @instance * @param {Number} [y=0] * @returns {Number|dc.legend} */ this.y = function (y) { if (!arguments.length) { return _y; } _y = y; return this; }; /** * Set or get gap between legend items. * @method gap * @memberof dc.legend * @instance * @param {Number} [gap=5] * @returns {Number|dc.legend} */ this.gap = function (gap) { if (!arguments.length) { return _gap; } _gap = gap; return this; }; /** * Set or get legend item height. * @method itemHeight * @memberof dc.legend * @instance * @param {Number} [itemHeight=12] * @returns {Number|dc.legend} */ this.itemHeight = function (itemHeight) { if (!arguments.length) { return _itemHeight; } _itemHeight = itemHeight; return this; }; /** * Position legend horizontally instead of vertically. 
* @method horizontal * @memberof dc.legend * @instance * @param {Boolean} [horizontal=false] * @returns {Boolean|dc.legend} */ this.horizontal = function (horizontal) { if (!arguments.length) { return _horizontal; } _horizontal = horizontal; return this; }; /** * Maximum width for horizontal legend. * @method legendWidth * @memberof dc.legend * @instance * @param {Number} [legendWidth=500] * @returns {Number|dc.legend} */ this.legendWidth = function (legendWidth) { if (!arguments.length) { return _legendWidth; } _legendWidth = legendWidth; return this; }; /** * Legend item width for horizontal legend. * @method itemWidth * @memberof dc.legend * @instance * @param {Number} [itemWidth=70] * @returns {Number|dc.legend} */ this.itemWidth = function (itemWidth) { if (!arguments.length) { return _itemWidth; } _itemWidth = itemWidth; return this; }; /** * Turn automatic width for legend items on or off. If true, {@link dc.legend#itemWidth itemWidth} is ignored. * This setting takes into account the {@link dc.legend#gap gap}. * @method autoItemWidth * @memberof dc.legend * @instance * @param {Boolean} [autoItemWidth=false] * @returns {Boolean|dc.legend} */ this.autoItemWidth = function (autoItemWidth) { if (!arguments.length) { return _autoItemWidth; } _autoItemWidth = autoItemWidth; return this; }; /** * Set or get the legend text function. The legend widget uses this function to render the legend * text for each item. If no function is specified the legend widget will display the names * associated with each group. * @method legendText * @memberof dc.legend * @instance * @param {Function} [legendText] * @returns {Function|dc.legend} * @example * // default legendText * legend.legendText(dc.pluck('name')) * * // create numbered legend items * chart.legend(dc.legend().legendText(function(d, i) { return i + '. 
' + d.name; })) * * // create legend displaying group counts * chart.legend(dc.legend().legendText(function(d) { return d.name + ': ' d.data; })) **/ this.legendText = function (legendText) { if (!arguments.length) { return _legendText; } _legendText = legendText; return this; }; /** * Maximum number of legend items to display * @method maxItems * @memberof dc.legend * @instance * @param {Number} [maxItems] * @return {dc.legend} */ this.maxItems = function (maxItems) { if (!arguments.length) { return _maxItems; } _maxItems = utils.isNumber(maxItems) ? maxItems : undefined; return this; }; return this; } } export const legend = () => new Legend();
src/base/legend.js
import {pluck, utils} from '../core/utils'; /** * Legend is a attachable widget that can be added to other dc charts to render horizontal legend * labels. * * Examples: * - {@link http://dc-js.github.com/dc.js/ Nasdaq 100 Index} * - {@link http://dc-js.github.com/dc.js/crime/index.html Canadian City Crime Stats} * @class Legend * @memberof dc * @example * chart.legend(dc.legend().x(400).y(10).itemHeight(13).gap(5)) * @returns {Legend} */ export class Legend { constructor () { const LABEL_GAP = 2; let _parent; let _x = 0; let _y = 0; let _itemHeight = 12; let _gap = 5; let _horizontal = false; let _legendWidth = 560; let _itemWidth = 70; let _autoItemWidth = false; let _legendText = pluck('name'); let _maxItems; let _g; this.parent = function (p) { if (!arguments.length) { return _parent; } _parent = p; return this; }; this.render = () => { _parent.svg().select('g.dc-legend').remove(); _g = _parent.svg().append('g') .attr('class', 'dc-legend') .attr('transform', 'translate(' + _x + ',' + _y + ')'); let legendables = _parent.legendables(); if (_maxItems !== undefined) { legendables = legendables.slice(0, _maxItems); } const itemEnter = _g.selectAll('g.dc-legend-item') .data(legendables) .enter() .append('g') .attr('class', 'dc-legend-item') .on('mouseover', d => { _parent.legendHighlight(d); }) .on('mouseout', d => { _parent.legendReset(d); }) .on('click', d => { d.chart.legendToggle(d); }); _g.selectAll('g.dc-legend-item') .classed('fadeout', d => d.chart.isLegendableHidden(d)); if (legendables.some(pluck('dashstyle'))) { itemEnter .append('line') .attr('x1', 0) .attr('y1', _itemHeight / 2) .attr('x2', _itemHeight) .attr('y2', _itemHeight / 2) .attr('stroke-width', 2) .attr('stroke-dasharray', pluck('dashstyle')) .attr('stroke', pluck('color')); } else { itemEnter .append('rect') .attr('width', _itemHeight) .attr('height', _itemHeight) .attr('fill', d => d ? 
d.color : 'blue'); } itemEnter.append('text') .text(_legendText) .attr('x', _itemHeight + LABEL_GAP) .attr('y', function () { return _itemHeight / 2 + (this.clientHeight ? this.clientHeight : 13) / 2 - 2; }); let _cumulativeLegendTextWidth = 0; let row = 0; itemEnter.attr('transform', function (d, i) { if (_horizontal) { const itemWidth = _autoItemWidth === true ? this.getBBox().width + _gap : _itemWidth; if ((_cumulativeLegendTextWidth + itemWidth) > _legendWidth && _cumulativeLegendTextWidth > 0) { ++row; _cumulativeLegendTextWidth = 0; } const translateBy = 'translate(' + _cumulativeLegendTextWidth + ',' + row * legendItemHeight() + ')'; _cumulativeLegendTextWidth += itemWidth; return translateBy; } else { return 'translate(0,' + i * legendItemHeight() + ')'; } }); }; function legendItemHeight () { return _gap + _itemHeight; } /** * Set or get x coordinate for legend widget. * @method x * @memberof dc.legend * @instance * @param {Number} [x=0] * @returns {Number|dc.legend} */ this.x = function (x) { if (!arguments.length) { return _x; } _x = x; return this; }; /** * Set or get y coordinate for legend widget. * @method y * @memberof dc.legend * @instance * @param {Number} [y=0] * @returns {Number|dc.legend} */ this.y = function (y) { if (!arguments.length) { return _y; } _y = y; return this; }; /** * Set or get gap between legend items. * @method gap * @memberof dc.legend * @instance * @param {Number} [gap=5] * @returns {Number|dc.legend} */ this.gap = function (gap) { if (!arguments.length) { return _gap; } _gap = gap; return this; }; /** * Set or get legend item height. * @method itemHeight * @memberof dc.legend * @instance * @param {Number} [itemHeight=12] * @returns {Number|dc.legend} */ this.itemHeight = function (itemHeight) { if (!arguments.length) { return _itemHeight; } _itemHeight = itemHeight; return this; }; /** * Position legend horizontally instead of vertically. 
* @method horizontal * @memberof dc.legend * @instance * @param {Boolean} [horizontal=false] * @returns {Boolean|dc.legend} */ this.horizontal = function (horizontal) { if (!arguments.length) { return _horizontal; } _horizontal = horizontal; return this; }; /** * Maximum width for horizontal legend. * @method legendWidth * @memberof dc.legend * @instance * @param {Number} [legendWidth=500] * @returns {Number|dc.legend} */ this.legendWidth = function (legendWidth) { if (!arguments.length) { return _legendWidth; } _legendWidth = legendWidth; return this; }; /** * Legend item width for horizontal legend. * @method itemWidth * @memberof dc.legend * @instance * @param {Number} [itemWidth=70] * @returns {Number|dc.legend} */ this.itemWidth = function (itemWidth) { if (!arguments.length) { return _itemWidth; } _itemWidth = itemWidth; return this; }; /** * Turn automatic width for legend items on or off. If true, {@link dc.legend#itemWidth itemWidth} is ignored. * This setting takes into account the {@link dc.legend#gap gap}. * @method autoItemWidth * @memberof dc.legend * @instance * @param {Boolean} [autoItemWidth=false] * @returns {Boolean|dc.legend} */ this.autoItemWidth = function (autoItemWidth) { if (!arguments.length) { return _autoItemWidth; } _autoItemWidth = autoItemWidth; return this; }; /** * Set or get the legend text function. The legend widget uses this function to render the legend * text for each item. If no function is specified the legend widget will display the names * associated with each group. * @method legendText * @memberof dc.legend * @instance * @param {Function} [legendText] * @returns {Function|dc.legend} * @example * // default legendText * legend.legendText(dc.pluck('name')) * * // create numbered legend items * chart.legend(dc.legend().legendText(function(d, i) { return i + '. 
' + d.name; })) * * // create legend displaying group counts * chart.legend(dc.legend().legendText(function(d) { return d.name + ': ' d.data; })) **/ this.legendText = function (legendText) { if (!arguments.length) { return _legendText; } _legendText = legendText; return this; }; /** * Maximum number of legend items to display * @method maxItems * @memberof dc.legend * @instance * @param {Number} [maxItems] * @return {dc.legend} */ this.maxItems = function (maxItems) { if (!arguments.length) { return _maxItems; } _maxItems = utils.isNumber(maxItems) ? maxItems : undefined; return this; }; return this; } } export const legend = () => new Legend();
Correct variable naming convention
src/base/legend.js
Correct variable naming convention
<ide><path>rc/base/legend.js <ide> return _itemHeight / 2 + (this.clientHeight ? this.clientHeight : 13) / 2 - 2; <ide> }); <ide> <del> let _cumulativeLegendTextWidth = 0; <add> let cumulativeLegendTextWidth = 0; <ide> let row = 0; <ide> itemEnter.attr('transform', function (d, i) { <ide> if (_horizontal) { <ide> const itemWidth = _autoItemWidth === true ? this.getBBox().width + _gap : _itemWidth; <del> if ((_cumulativeLegendTextWidth + itemWidth) > _legendWidth && _cumulativeLegendTextWidth > 0) { <add> if ((cumulativeLegendTextWidth + itemWidth) > _legendWidth && cumulativeLegendTextWidth > 0) { <ide> ++row; <del> _cumulativeLegendTextWidth = 0; <add> cumulativeLegendTextWidth = 0; <ide> } <del> const translateBy = 'translate(' + _cumulativeLegendTextWidth + ',' + row * legendItemHeight() + ')'; <del> _cumulativeLegendTextWidth += itemWidth; <add> const translateBy = 'translate(' + cumulativeLegendTextWidth + ',' + row * legendItemHeight() + ')'; <add> cumulativeLegendTextWidth += itemWidth; <ide> return translateBy; <ide> } else { <ide> return 'translate(0,' + i * legendItemHeight() + ')';
Java
apache-2.0
8ab1d50e1d72b9d47c6f74c2e95fea530e5edef4
0
apache/incubator-taverna-plugin-component
package net.sf.taverna.t2.component.ui.serviceprovider; import static java.util.Arrays.asList; import static org.apache.log4j.Logger.getLogger; import java.net.URL; import java.util.Arrays; import java.util.List; import javax.swing.Icon; import net.sf.taverna.t2.component.ComponentActivity; import net.sf.taverna.t2.component.ComponentActivityConfigurationBean; import net.sf.taverna.t2.component.api.RegistryException; import net.sf.taverna.t2.component.api.Version; import net.sf.taverna.t2.component.preference.ComponentPreference; import net.sf.taverna.t2.component.registry.ComponentUtil; import net.sf.taverna.t2.component.registry.ComponentVersion; import net.sf.taverna.t2.servicedescriptions.ServiceDescription; import net.sf.taverna.t2.workflowmodel.processor.activity.Activity; import org.apache.log4j.Logger; public class ComponentServiceDesc extends ServiceDescription<ComponentActivityConfigurationBean> { private static ComponentPreference preference = ComponentPreference .getInstance(); @SuppressWarnings("unused") private static Logger logger = getLogger(ComponentServiceDesc.class); private Version.ID identification; public ComponentServiceDesc(Version.ID identification) { this.identification = identification; } /** * The subclass of Activity which should be instantiated when adding a * service for this description */ @Override public Class<? extends Activity<ComponentActivityConfigurationBean>> getActivityClass() { return ComponentActivity.class; } /** * The configuration bean which is to be used for configuring the * instantiated activity. Making this bean will typically require some of * the fields set on this service description, like an endpoint URL or * method name. * */ @Override public ComponentActivityConfigurationBean getActivityConfiguration() { return new ComponentActivityConfigurationBean(getIdentification()); } /** * An icon to represent this service description in the service palette. 
*/ @Override public Icon getIcon() { return ComponentServiceIcon.getIcon(); } /** * The display name that will be shown in service palette and will be used * as a template for processor name when added to workflow. */ @Override public String getName() { return getIdentification().getComponentName(); } /** * The path to this service description in the service palette. Folders will * be created for each element of the returned path. */ @Override public List<String> getPath() { // For deeper paths you may return several strings return asList("Components", preference.getRegistryName(identification.getRegistryBase()), identification.getFamilyName()); } /** * Return a list of data values uniquely identifying this service * description (to avoid duplicates). Include only primary key like fields, * i.e. ignore descriptions, icons, etc. */ @Override protected List<? extends Object> getIdentifyingData() { return Arrays.<Object> asList(identification.getRegistryBase(), identification.getFamilyName(), identification.getComponentName()); } @Override public String toString() { return "Component " + getName(); } /** * @return the identification */ public Version.ID getIdentification() { return identification; } /** * @param identification * the identification to set */ public void setIdentification(Version.ID identification) { this.identification = identification; } public URL getHelpURL() { try { Version version = ComponentUtil.calculateComponentVersion(getIdentification()); return version.getHelpURL(); } catch (RegistryException e) { logger.error(e); } return null; } }
src/main/java/net/sf/taverna/t2/component/ui/serviceprovider/ComponentServiceDesc.java
package net.sf.taverna.t2.component.ui.serviceprovider; import static java.util.Arrays.asList; import static org.apache.log4j.Logger.getLogger; import java.util.Arrays; import java.util.List; import javax.swing.Icon; import net.sf.taverna.t2.component.ComponentActivity; import net.sf.taverna.t2.component.ComponentActivityConfigurationBean; import net.sf.taverna.t2.component.api.Version; import net.sf.taverna.t2.component.preference.ComponentPreference; import net.sf.taverna.t2.servicedescriptions.ServiceDescription; import net.sf.taverna.t2.workflowmodel.processor.activity.Activity; import org.apache.log4j.Logger; public class ComponentServiceDesc extends ServiceDescription<ComponentActivityConfigurationBean> { private static ComponentPreference preference = ComponentPreference .getInstance(); @SuppressWarnings("unused") private static Logger logger = getLogger(ComponentServiceDesc.class); private Version.ID identification; public ComponentServiceDesc(Version.ID identification) { this.identification = identification; } /** * The subclass of Activity which should be instantiated when adding a * service for this description */ @Override public Class<? extends Activity<ComponentActivityConfigurationBean>> getActivityClass() { return ComponentActivity.class; } /** * The configuration bean which is to be used for configuring the * instantiated activity. Making this bean will typically require some of * the fields set on this service description, like an endpoint URL or * method name. * */ @Override public ComponentActivityConfigurationBean getActivityConfiguration() { return new ComponentActivityConfigurationBean(getIdentification()); } /** * An icon to represent this service description in the service palette. */ @Override public Icon getIcon() { return ComponentServiceIcon.getIcon(); } /** * The display name that will be shown in service palette and will be used * as a template for processor name when added to workflow. 
*/ @Override public String getName() { return getIdentification().getComponentName(); } /** * The path to this service description in the service palette. Folders will * be created for each element of the returned path. */ @Override public List<String> getPath() { // For deeper paths you may return several strings return asList("Components", preference.getRegistryName(identification.getRegistryBase()), identification.getFamilyName()); } /** * Return a list of data values uniquely identifying this service * description (to avoid duplicates). Include only primary key like fields, * i.e. ignore descriptions, icons, etc. */ @Override protected List<? extends Object> getIdentifyingData() { return Arrays.<Object> asList(identification.getRegistryBase(), identification.getFamilyName(), identification.getComponentName()); } @Override public String toString() { return "Component " + getName(); } /** * @return the identification */ public Version.ID getIdentification() { return identification; } /** * @param identification * the identification to set */ public void setIdentification(Version.ID identification) { this.identification = identification; } }
Added ability to get help on component git-svn-id: c991a65cd530d4df12bc8798d93b4b35787426a0@16699 bf327186-88b3-11dd-a302-d386e5130c1c
src/main/java/net/sf/taverna/t2/component/ui/serviceprovider/ComponentServiceDesc.java
Added ability to get help on component
<ide><path>rc/main/java/net/sf/taverna/t2/component/ui/serviceprovider/ComponentServiceDesc.java <ide> import static java.util.Arrays.asList; <ide> import static org.apache.log4j.Logger.getLogger; <ide> <add>import java.net.URL; <ide> import java.util.Arrays; <ide> import java.util.List; <ide> <ide> <ide> import net.sf.taverna.t2.component.ComponentActivity; <ide> import net.sf.taverna.t2.component.ComponentActivityConfigurationBean; <add>import net.sf.taverna.t2.component.api.RegistryException; <ide> import net.sf.taverna.t2.component.api.Version; <ide> import net.sf.taverna.t2.component.preference.ComponentPreference; <add>import net.sf.taverna.t2.component.registry.ComponentUtil; <add>import net.sf.taverna.t2.component.registry.ComponentVersion; <ide> import net.sf.taverna.t2.servicedescriptions.ServiceDescription; <ide> import net.sf.taverna.t2.workflowmodel.processor.activity.Activity; <ide> <ide> public void setIdentification(Version.ID identification) { <ide> this.identification = identification; <ide> } <add> <add> public URL getHelpURL() { <add> try { <add> Version version = ComponentUtil.calculateComponentVersion(getIdentification()); <add> return version.getHelpURL(); <add> } catch (RegistryException e) { <add> logger.error(e); <add> } <add> return null; <add> } <ide> }
JavaScript
apache-2.0
2e4a6e01bc90b1675d460da94d34377edf56c521
0
jlhughes/foam,mdittmer/foam,mdittmer/foam,osric-the-knight/foam,foam-framework/foam,mdittmer/foam,jacksonic/foam,jlhughes/foam,osric-the-knight/foam,foam-framework/foam,jlhughes/foam,mdittmer/foam,foam-framework/foam,osric-the-knight/foam,osric-the-knight/foam,jacksonic/foam,jacksonic/foam,foam-framework/foam,jlhughes/foam,foam-framework/foam,jacksonic/foam
/** * @license * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ CLASS({ name: 'AceCodeView', package: 'foam.flow', extendsModel: 'foam.flow.Element', requires: [ 'foam.flow.SourceCode' ], imports: [ 'document', 'codeViewLoadState$', 'aceScript$' ], properties: [ { name: 'data', type: 'foam.flow.SourceCode', factory: function() { return this.SourceCode.create({ data: 'console.log("Hello world!");' }); }, postSet: function(old, nu) { if ( old ) { old.language$.removeListener(this.onLanguageChange); old.code$.removeListener(this.onDataCodeChange); } if ( nu ) { nu.language$.addListener(this.onLanguageChange); nu.code$.addListener(this.onDataCodeChange); } this.onLanguageChange(); this.onDataCodeChange(); } }, { // TODO(markdittmer): Should be able to use foam.ui.ModeProperty here // but it doesn't seem to be working. model_: 'StringProperty', name: 'mode', defaultValue: 'read-write', postSet: function(old, nu) { if ( ! this.codeView || old === nu ) return; var nuReadOnly = nu === 'read-only'; if ( nuReadOnly !== this.codeView.getReadOnly() ) { if ( nuReadOnly ) this.applyReadOnlySettings(); else this.applyReadWriteSettings(); } } }, { model_: 'StringProperty', name: 'codeViewLoadState', defaultValue: 'unloaded' }, { model_: 'StringProperty', name: 'pathToAce', defaultValue: 'ace-builds/src-noconflict/ace.js' }, { model_: 'StringProperty', name: 'aceTheme', defaultValue: 'ace/theme/textmate' }, { model_: 'StringProperty', name: 'aceMode', defaultValue: 'ace/mode/javascript', postSet: function(old, nu) { if ( ! 
this.codeView ) return; this.codeView.getSession().setMode('ace/mode/' + this.data.language); } }, { model_: 'IntProperty', name: 'aceTabSize', defaultValue: 2 }, { model_: 'IntProperty', name: 'aceMinLines', defaultValue: 5 }, { model_: 'IntProperty', name: 'aceMaxLines', defaultValue: 25 }, { model_: 'StringProperty', name: 'aceReadOnlyTheme', defaultValue: 'ace/theme/kuroir' }, { model_: 'IntProperty', name: 'aceReadOnlyMinLines', defaultValue: 2 }, { model_: 'IntProperty', name: 'aceReadOnlyMaxLines', defaultValue: 25 }, { name: 'aceScript' }, { name: 'codeView' }, { model_: 'BooleanProperty', name: 'allFolded', defaultValue: false } ], methods: [ { name: 'initHTML', code: function() { this.SUPER.apply(this, arguments); if ( this.codeViewLoadState === 'unloaded' ) { this.aceScript = this.document.createElement('script'); this.aceScript.src = this.pathToAce; this.document.head.appendChild(this.aceScript); this.codeViewLoadState = 'pending'; } if ( this.codeViewLoadState === 'pending' ) { this.aceScript.addEventListener('load', this.onAceLoaded); this.aceScript.addEventListener('error', this.onAceLoadFailed); } if ( this.codeViewLoadState === 'loaded' ) this.onAceLoaded(); if ( this.codeViewLoadState === 'failed' ) this.onAceLoadFailed(); } }, { name: 'destroy', code: function() { if ( this.codeViewLoadState === 'pending' ) this.removeDOMListeners_(); this.SUPER.apply(this, arguments); } }, { name: 'removeDOMListeners_', code: function() { this.aceScript.removeEventListener('load', this.onAceLoaded); this.aceScript.removeEventListener('error', this.onAceLoadFailed); } }, { name: 'applyReadOnlySettings', code: function() { this.codeView.setOptions({ theme: this.aceReadOnlyTheme, mode: this.aceMode, tabSize: this.aceTabSize, minLines: this.aceReadOnlyMinLines, maxLines: this.aceReadOnlyMaxLines, readOnly: this.mode === 'read-only' }); this.foldAll(); } }, { name: 'applyReadWriteSettings', code: function() { this.codeView.setOptions({ theme: this.aceTheme, mode: 
this.aceMode, tabSize: this.aceTabSize, minLines: this.aceMinLines, maxLines: this.aceMaxLines, readOnly: this.mode === 'read-only' }); this.unfoldAll(); } }, { name: 'foldAll', code: function() { if ( ! this.codeView || this.allFolded ) return; this.codeView.selectAll(); this.codeView.getSession().toggleFold(); this.codeView.clearSelection(); this.allFolded = true; } }, { name: 'unfoldAll', code: function(tryToUnfold) { if ( ! this.codeView || ! this.allFolded ) return; this.codeView.selectAll(); this.codeView.getSession().toggleFold(); this.codeView.clearSelection(); this.allFolded = false; } } ], listeners: [ { name: 'onAceLoaded', code: function() { this.removeDOMListeners_(); if ( ! this.$ ) return; var codeView = this.codeView = GLOBAL.ace.edit(this.$); codeView.setValue(this.data && this.data.code && this.data.code.trim() || ''); if ( this.mode === 'read-only' ) this.applyReadOnlySettings(); else this.applyReadWriteSettings(); codeView.clearSelection(); var session = codeView.getSession(); session.on('changeFold', this.onChangeFold); session.on('change', this.onAceCodeChange); this.codeViewLoadState = 'loaded'; } }, { name: 'onAceLoadFailed', code: function() { this.removeDOMListeners_(); this.codeViewLoadState = 'failed'; } }, { name: 'onAceCodeChange', isFramed: true, code: function(e) { if ( ! this.codeView || ! this.data ) return; var codeViewCode = this.codeView.getValue(); if ( codeViewCode !== this.data.code ) this.data.code = codeViewCode; } }, { name: 'onDataCodeChange', isFramed: true, code: function(e) { if ( ! this.codeView || ! this.data ) return; var codeViewCode = this.codeView.getValue(); if ( codeViewCode !== this.data.code ) { this.codeView.setValue(this.data.code); this.codeView.clearSelection(); // Value changes will unfold code; re-fold read-only view. Note that // this may be a change if user manually unfolded some code, but kept // it read-only. Also, any folds in read-write views will get unfolded // without correct. 
Unfortuantely, using mode as a guess at what we // should do is all we've got. if ( this.mode === 'read-only' ) this.foldAll(); } } }, { name: 'onLanguageChange', code: function() { if ( ! this.data ) return; var aceMode = 'ace/mode/' + this.data.language; if ( this.aceMode !== aceMode ) this.aceMode = aceMode; } }, { name: 'onChangeFold', code: function() { if ( this.mode !== 'read-only' ) return; this.allFolded = false; } } ], templates: [ // Support both <ace-code-view>...</ace-code-view> and %%myAceCodeView. function toInnerHTML() {/* <% if ( this.inner ) { %><%= this.inner() %><% } else { %><%= this.data.code %><% } %> */}, function CSS() {/* ace-code-view { display: block; font: 14px/normal 'Monaco', 'Menlo', 'Ubuntu Mono', 'Consolas', 'source-code-pro', monospace; flex-grow: 1; } */} ] });
js/foam/flow/AceCodeView.js
/** * @license * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ CLASS({ name: 'AceCodeView', package: 'foam.flow', extendsModel: 'foam.flow.Element', requires: [ 'foam.flow.SourceCode' ], imports: [ 'document', 'codeViewLoadState$', 'aceScript$' ], properties: [ { name: 'data', type: 'foam.flow.SourceCode', factory: function() { return this.SourceCode.create({ data: 'console.log("Hello world!");' }); }, postSet: function(old, nu) { if ( old ) old.language$.removeListener(this.onLanguageChange); if ( nu ) nu.language$.addListener(this.onLanguageChange); if ( ! old || old.language !== nu.language ) this.onLanguageChange(); if ( ! this.codeView ) return; var codeViewCode = this.codeView.getValue(); if ( codeViewCode !== nu.code ) { this.codeView.setValue(nu.code); this.codeView.clearSelection(); } } }, { // TODO(markdittmer): Should be able to use foam.ui.ModeProperty here // but it doesn't seem to be working. model_: 'StringProperty', name: 'mode', defaultValue: 'read-write', postSet: function(old, nu) { if ( ! this.codeView || old === nu ) return; var nuReadOnly = nu === 'read-only'; if ( nuReadOnly !== this.codeView.getReadOnly() ) { if ( nuReadOnly ) this.applyReadOnlySettings(); else this.applyReadWriteSettings(); } } }, { model_: 'StringProperty', name: 'codeViewLoadState', defaultValue: 'unloaded' }, { model_: 'StringProperty', name: 'pathToAce', defaultValue: 'ace-builds/src-noconflict/ace.js' }, { model_: 'StringProperty', name: 'aceTheme', defaultValue: 'ace/theme/textmate' }, { model_: 'StringProperty', name: 'aceMode', defaultValue: 'ace/mode/javascript', postSet: function(old, nu) { if ( ! 
this.codeView ) return; this.codeView.getSession().setMode('ace/mode/' + this.data.language); } }, { model_: 'IntProperty', name: 'aceTabSize', defaultValue: 2 }, { model_: 'IntProperty', name: 'aceMinLines', defaultValue: 5 }, { model_: 'IntProperty', name: 'aceMaxLines', defaultValue: 25 }, { model_: 'StringProperty', name: 'aceReadOnlyTheme', defaultValue: 'ace/theme/kuroir' }, { model_: 'IntProperty', name: 'aceReadOnlyMinLines', defaultValue: 2 }, { model_: 'IntProperty', name: 'aceReadOnlyMaxLines', defaultValue: 25 }, { name: 'aceScript' }, { name: 'codeView' }, { model_: 'BooleanProperty', name: 'allFolded', defaultValue: false } ], methods: [ { name: 'initHTML', code: function() { this.SUPER.apply(this, arguments); if ( this.codeViewLoadState === 'unloaded' ) { this.aceScript = this.document.createElement('script'); this.aceScript.src = this.pathToAce; this.document.head.appendChild(this.aceScript); this.codeViewLoadState = 'pending'; } if ( this.codeViewLoadState === 'pending' ) { this.aceScript.addEventListener('load', this.onAceLoaded); this.aceScript.addEventListener('error', this.onAceLoadFailed); } if ( this.codeViewLoadState === 'loaded' ) this.onAceLoaded(); if ( this.codeViewLoadState === 'failed' ) this.onAceLoadFailed(); } }, { name: 'destroy', code: function() { if ( this.codeViewLoadState === 'pending' ) this.removeDOMListeners_(); this.SUPER.apply(this, arguments); } }, { name: 'removeDOMListeners_', code: function() { this.aceScript.removeEventListener('load', this.onAceLoaded); this.aceScript.removeEventListener('error', this.onAceLoadFailed); } }, { name: 'applyReadOnlySettings', code: function() { this.codeView.setOptions({ theme: this.aceReadOnlyTheme, mode: this.aceMode, tabSize: this.aceTabSize, minLines: this.aceReadOnlyMinLines, maxLines: this.aceReadOnlyMaxLines, readOnly: this.mode === 'read-only' }); this.foldAll(); } }, { name: 'applyReadWriteSettings', code: function() { this.codeView.setOptions({ theme: this.aceTheme, mode: 
this.aceMode, tabSize: this.aceTabSize, minLines: this.aceMinLines, maxLines: this.aceMaxLines, readOnly: this.mode === 'read-only' }); this.unfoldAll(); } }, { name: 'foldAll', code: function() { if ( ! this.codeView || this.allFolded ) return; this.codeView.selectAll(); this.codeView.getSession().toggleFold(); this.codeView.clearSelection(); this.allFolded = true; } }, { name: 'unfoldAll', code: function(tryToUnfold) { if ( ! this.codeView || ! this.allFolded ) return; this.codeView.selectAll(); this.codeView.getSession().toggleFold(); this.codeView.clearSelection(); this.allFolded = false; } } ], listeners: [ { name: 'onAceLoaded', code: function() { this.removeDOMListeners_(); if ( ! this.$ ) return; var codeView = this.codeView = GLOBAL.ace.edit(this.$); codeView.setValue(this.data && this.data.code && this.data.code.trim() || ''); if ( this.mode === 'read-only' ) this.applyReadOnlySettings(); else this.applyReadWriteSettings(); codeView.clearSelection(); var session = codeView.getSession(); session.on('changeFold', this.onChangeFold); session.on('change', this.onCodeChange); this.codeViewLoadState = 'loaded'; } }, { name: 'onAceLoadFailed', code: function() { this.removeDOMListeners_(); this.codeViewLoadState = 'failed'; } }, { name: 'onCodeChange', code: function(e) { var codeViewCode = this.codeView.getValue(); if ( codeViewCode !== this.data ) this.data.code = codeViewCode; } }, { name: 'onLanguageChange', code: function() { this.aceMode = 'ace/mode/' + this.data.language; } }, { name: 'onChangeFold', code: function() { if ( this.mode !== 'read-only' ) return; this.allFolded = false; } } ], templates: [ // Support both <ace-code-view>...</ace-code-view> and %%myAceCodeView. 
function toInnerHTML() {/* <% if ( this.inner ) { %><%= this.inner() %><% } else { %><%= this.data.code %><% } %> */}, function CSS() {/* ace-code-view { display: block; font: 14px/normal 'Monaco', 'Menlo', 'Ubuntu Mono', 'Consolas', 'source-code-pro', monospace; flex-grow: 1; } */} ] });
AceCodeView: Support proper two-way data binding for data.code changes coming from either code editor or from elsewhere
js/foam/flow/AceCodeView.js
AceCodeView: Support proper two-way data binding for data.code changes coming from either code editor or from elsewhere
<ide><path>s/foam/flow/AceCodeView.js <ide> }); <ide> }, <ide> postSet: function(old, nu) { <del> if ( old ) old.language$.removeListener(this.onLanguageChange); <del> if ( nu ) nu.language$.addListener(this.onLanguageChange); <del> if ( ! old || old.language !== nu.language ) this.onLanguageChange(); <del> <del> if ( ! this.codeView ) return; <del> <del> var codeViewCode = this.codeView.getValue(); <del> if ( codeViewCode !== nu.code ) { <del> this.codeView.setValue(nu.code); <del> this.codeView.clearSelection(); <del> } <add> if ( old ) { <add> old.language$.removeListener(this.onLanguageChange); <add> old.code$.removeListener(this.onDataCodeChange); <add> } <add> if ( nu ) { <add> nu.language$.addListener(this.onLanguageChange); <add> nu.code$.addListener(this.onDataCodeChange); <add> } <add> this.onLanguageChange(); <add> this.onDataCodeChange(); <ide> } <ide> }, <ide> { <ide> <ide> var session = codeView.getSession(); <ide> session.on('changeFold', this.onChangeFold); <del> session.on('change', this.onCodeChange); <add> session.on('change', this.onAceCodeChange); <ide> this.codeViewLoadState = 'loaded'; <ide> } <ide> }, <ide> } <ide> }, <ide> { <del> name: 'onCodeChange', <add> name: 'onAceCodeChange', <add> isFramed: true, <ide> code: function(e) { <add> if ( ! this.codeView || ! this.data ) return; <ide> var codeViewCode = this.codeView.getValue(); <del> if ( codeViewCode !== this.data ) this.data.code = codeViewCode; <add> if ( codeViewCode !== this.data.code ) this.data.code = codeViewCode; <add> } <add> }, <add> { <add> name: 'onDataCodeChange', <add> isFramed: true, <add> code: function(e) { <add> if ( ! this.codeView || ! this.data ) return; <add> var codeViewCode = this.codeView.getValue(); <add> if ( codeViewCode !== this.data.code ) { <add> this.codeView.setValue(this.data.code); <add> this.codeView.clearSelection(); <add> <add> // Value changes will unfold code; re-fold read-only view. 
Note that <add> // this may be a change if user manually unfolded some code, but kept <add> // it read-only. Also, any folds in read-write views will get unfolded <add> // without correct. Unfortuantely, using mode as a guess at what we <add> // should do is all we've got. <add> if ( this.mode === 'read-only' ) this.foldAll(); <add> } <ide> } <ide> }, <ide> { <ide> name: 'onLanguageChange', <ide> code: function() { <del> this.aceMode = 'ace/mode/' + this.data.language; <add> if ( ! this.data ) return; <add> var aceMode = 'ace/mode/' + this.data.language; <add> if ( this.aceMode !== aceMode ) this.aceMode = aceMode; <ide> } <ide> }, <ide> {
Java
epl-1.0
9e6f5e040a328232d38dfa2a059026e1993a07ee
0
debrief/debrief,debrief/debrief,pecko/debrief,debrief/debrief,debrief/debrief,debrief/debrief,pecko/debrief,pecko/debrief,pecko/debrief,debrief/debrief,theanuradha/debrief,pecko/debrief,theanuradha/debrief,pecko/debrief,theanuradha/debrief,theanuradha/debrief,theanuradha/debrief,theanuradha/debrief,pecko/debrief,theanuradha/debrief
/* * Debrief - the Open Source Maritime Analysis Application * http://debrief.info * * (C) 2000-2014, PlanetMayo Ltd * * This library is free software; you can redistribute it and/or * modify it under the terms of the Eclipse Public License v1.0 * (http://www.eclipse.org/legal/epl-v10.html) * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ package org.mwc.debrief.core.ContextOperations; import java.awt.Color; import org.eclipse.core.commands.ExecutionException; import org.eclipse.core.commands.operations.IUndoableOperation; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Status; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.wizard.WizardDialog; import org.eclipse.swt.widgets.Display; import org.mwc.cmap.core.CorePlugin; import org.mwc.cmap.core.operations.CMAPOperation; import org.mwc.cmap.core.property_support.RightClickSupport.RightClickContextItemGenerator; import org.mwc.cmap.core.wizards.RangeBearingPage; import org.mwc.debrief.core.wizards.EnterSolutionPage; import org.mwc.debrief.core.wizards.EnterSolutionPage.SolutionDataItem; import org.mwc.debrief.core.wizards.s2r.TMAFromSensorWizard; import Debrief.Wrappers.FixWrapper; import Debrief.Wrappers.TrackWrapper; import Debrief.Wrappers.Track.AbsoluteTMASegment; import Debrief.Wrappers.Track.CoreTMASegment; import Debrief.Wrappers.Track.TrackSegment; import Debrief.Wrappers.Track.TrackWrapper_Support.SegmentList; import MWC.GUI.Editable; import MWC.GUI.Layer; import MWC.GUI.Layers; import MWC.GenericData.HiResDate; import MWC.GenericData.WorldDistance; import MWC.GenericData.WorldLocation; import MWC.GenericData.WorldSpeed; import MWC.GenericData.WorldVector; import 
MWC.Utilities.TextFormatting.FormatRNDateTime; /** * @author ian.mayo * */ public class GenerateTMASegmentFromOwnshipPositions implements RightClickContextItemGenerator { private static final WorldSpeed DEFAULT_TARGET_SPEED = new WorldSpeed(12, WorldSpeed.Kts); private static final double DEFAULT_TARGET_COURSE = 120d; // //////////////////////////////////////////////////////////////////////////////////////////////// // testing for this class // //////////////////////////////////////////////////////////////////////////////////////////////// static public final class testMe extends junit.framework.TestCase { static public final String TEST_ALL_TEST_TYPE = "UNIT"; public testMe(final String val) { super(val); } public final void testIWork() { } } private static class TMAfromPositions extends CMAPOperation { private final Layers _layers; private final FixWrapper[] _items; private TrackWrapper _newTrack; private final double _courseDegs; private final WorldSpeed _speed; private final WorldVector _offset; public TMAfromPositions(final FixWrapper[] items, WorldVector offset, final Layers theLayers, final double courseDegs, final WorldSpeed speed) { super("Create TMA solution"); _items = items; _layers = theLayers; _courseDegs = courseDegs; _speed = speed; _offset = offset; } @Override public IStatus execute(final IProgressMonitor monitor, final IAdaptable info) throws ExecutionException { HiResDate startTime = _items[0].getDTG(); HiResDate endTime = _items[_items.length - 1].getDTG(); WorldLocation startPoint = _items[0].getLocation().add(_offset); final TrackSegment seg = new AbsoluteTMASegment(_courseDegs, _speed, startPoint, startTime, endTime); // _items, _offset, _speed, // _courseDegs, _layers); // now wrap it _newTrack = new TrackWrapper(); _newTrack.setColor(Color.red); _newTrack.add(seg); final String tNow = TrackSegment.TMA_LEADER + FormatRNDateTime.toString(_newTrack.getStartDTG().getDate() .getTime()); _newTrack.setName(tNow); 
_layers.addThisLayerAllowDuplication(_newTrack); // sorted, do the update _layers.fireExtended(); return Status.OK_STATUS; } @Override public IStatus undo(final IProgressMonitor monitor, final IAdaptable info) throws ExecutionException { // forget about the new tracks _layers.removeThisLayer(_newTrack); _layers.fireExtended(); return Status.OK_STATUS; } } /** * @param parent * @param theLayers * @param parentLayers * @param subjects */ public void generate(final IMenuManager parent, final Layers theLayers, final Layer[] parentLayers, final Editable[] subjects) { // Action _myAction = null; // so, see if it's something we can do business with if (subjects.length == 1) { // hmm, let's not allow it for just one item // see the equivalent part of RelativeTMASegment if we wish to support // this } else { // so, it's a number of items, Are they all sensor contact wrappers boolean allGood = true; final FixWrapper[] items = new FixWrapper[subjects.length]; for (int i = 0; i < subjects.length; i++) { final Editable editable = subjects[i]; if (editable instanceof FixWrapper) { // hmm, we need to check if this fix is part of a solution. have a // look at the parent FixWrapper fix = (FixWrapper) editable; TrackWrapper track = fix.getTrackWrapper(); SegmentList segments = track.getSegments(); TrackSegment parentSegment = segments.getSegmentFor(fix.getDateTimeGroup().getDate().getTime()); // is this first leg a TMA segment? if (parentSegment instanceof CoreTMASegment) { // yes = in which case we won't offer to // generate a track based upon it allGood = false; } else { // cool, stick with it items[i] = (FixWrapper) editable; } } else { allGood = false; break; } // are we good to go? if (allGood) { // cool wrap it in an action. 
_myAction = new Action( "Generate TMA solution from selected positions") { @Override public void run() { // get the supporting data final TMAFromSensorWizard wizard = new TMAFromSensorWizard(45d, new WorldDistance(5, WorldDistance.NM), DEFAULT_TARGET_COURSE, DEFAULT_TARGET_SPEED); final WizardDialog dialog = new WizardDialog(Display.getCurrent() .getActiveShell(), wizard); dialog.create(); dialog.open(); // did it work? if (dialog.getReturnCode() == WizardDialog.OK) { WorldVector res = new WorldVector(0, new WorldDistance(5, WorldDistance.NM), null); double courseDegs = 0; WorldSpeed speed = new WorldSpeed(5, WorldSpeed.Kts); final RangeBearingPage offsetPage = (RangeBearingPage) wizard .getPage(RangeBearingPage.NAME); if (offsetPage != null) { if (offsetPage.isPageComplete()) { res = new WorldVector( MWC.Algorithms.Conversions.Degs2Rads(offsetPage .getBearingDegs()), offsetPage.getRange(), null); } } final EnterSolutionPage solutionPage = (EnterSolutionPage) wizard .getPage(EnterSolutionPage.NAME); if (solutionPage != null) { if (solutionPage.isPageComplete()) { final EnterSolutionPage.SolutionDataItem item = (SolutionDataItem) solutionPage .getEditable(); courseDegs = item.getCourse(); speed = item.getSpeed(); } } // ok, go for it. // sort it out as an operation final IUndoableOperation convertToTrack1 = new TMAfromPositions( items, res, theLayers, courseDegs, speed); // ok, stick it on the buffer runIt(convertToTrack1); } else System.err.println("user cancelled"); } }; } } } // go for it, or not... if (_myAction != null) parent.add(_myAction); } /** * put the operation firer onto the undo history. We've refactored this into a * separate method so testing classes don't have to simulate the CorePlugin * * @param operation */ protected void runIt(final IUndoableOperation operation) { CorePlugin.run(operation); } }
org.mwc.debrief.core/src/org/mwc/debrief/core/ContextOperations/GenerateTMASegmentFromOwnshipPositions.java
/* * Debrief - the Open Source Maritime Analysis Application * http://debrief.info * * (C) 2000-2014, PlanetMayo Ltd * * This library is free software; you can redistribute it and/or * modify it under the terms of the Eclipse Public License v1.0 * (http://www.eclipse.org/legal/epl-v10.html) * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ package org.mwc.debrief.core.ContextOperations; import java.awt.Color; import org.eclipse.core.commands.ExecutionException; import org.eclipse.core.commands.operations.IUndoableOperation; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Status; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.wizard.WizardDialog; import org.eclipse.swt.widgets.Display; import org.mwc.cmap.core.CorePlugin; import org.mwc.cmap.core.operations.CMAPOperation; import org.mwc.cmap.core.property_support.RightClickSupport.RightClickContextItemGenerator; import org.mwc.cmap.core.wizards.RangeBearingPage; import org.mwc.debrief.core.wizards.EnterSolutionPage; import org.mwc.debrief.core.wizards.EnterSolutionPage.SolutionDataItem; import org.mwc.debrief.core.wizards.s2r.TMAFromSensorWizard; import Debrief.Wrappers.FixWrapper; import Debrief.Wrappers.TrackWrapper; import Debrief.Wrappers.Track.AbsoluteTMASegment; import Debrief.Wrappers.Track.CoreTMASegment; import Debrief.Wrappers.Track.TrackSegment; import Debrief.Wrappers.Track.TrackWrapper_Support.SegmentList; import MWC.GUI.Editable; import MWC.GUI.Layer; import MWC.GUI.Layers; import MWC.GenericData.HiResDate; import MWC.GenericData.WorldDistance; import MWC.GenericData.WorldLocation; import MWC.GenericData.WorldSpeed; import MWC.GenericData.WorldVector; import 
MWC.Utilities.TextFormatting.FormatRNDateTime; /** * @author ian.mayo * */ public class GenerateTMASegmentFromOwnshipPositions implements RightClickContextItemGenerator { private static final WorldSpeed DEFAULT_TARGET_SPEED = new WorldSpeed(12, WorldSpeed.Kts); private static final double DEFAULT_TARGET_COURSE = 120d; // //////////////////////////////////////////////////////////////////////////////////////////////// // testing for this class // //////////////////////////////////////////////////////////////////////////////////////////////// static public final class testMe extends junit.framework.TestCase { static public final String TEST_ALL_TEST_TYPE = "UNIT"; public testMe(final String val) { super(val); } public final void testIWork() { } } private static class TMAfromPositions extends CMAPOperation { private final Layers _layers; private final FixWrapper[] _items; private TrackWrapper _newTrack; private final double _courseDegs; private final WorldSpeed _speed; private final WorldVector _offset; public TMAfromPositions(final FixWrapper[] items, WorldVector offset, final Layers theLayers, final double courseDegs, final WorldSpeed speed) { super("Create TMA solution"); _items = items; _layers = theLayers; _courseDegs = courseDegs; _speed = speed; _offset = offset; } @Override public IStatus execute(final IProgressMonitor monitor, final IAdaptable info) throws ExecutionException { HiResDate startTime = _items[0].getDTG(); HiResDate endTime = _items[_items.length - 1].getDTG(); WorldLocation startPoint = _items[0].getLocation().add(_offset); final TrackSegment seg = new AbsoluteTMASegment(_courseDegs, _speed, startPoint, startTime, endTime); // _items, _offset, _speed, // _courseDegs, _layers); // now wrap it _newTrack = new TrackWrapper(); _newTrack.setColor(Color.red); _newTrack.add(seg); final String tNow = TrackSegment.TMA_LEADER + FormatRNDateTime.toString(_newTrack.getStartDTG().getDate() .getTime()); _newTrack.setName(tNow); 
_layers.addThisLayerAllowDuplication(_newTrack); // sorted, do the update _layers.fireExtended(); return Status.OK_STATUS; } @Override public IStatus undo(final IProgressMonitor monitor, final IAdaptable info) throws ExecutionException { // forget about the new tracks _layers.removeThisLayer(_newTrack); _layers.fireExtended(); return Status.OK_STATUS; } } /** * @param parent * @param theLayers * @param parentLayers * @param subjects */ public void generate(final IMenuManager parent, final Layers theLayers, final Layer[] parentLayers, final Editable[] subjects) { // Action _myAction = null; // so, see if it's something we can do business with if (subjects.length == 1) { // hmm, let's not allow it for just one item // see the equivalent part of RelativeTMASegment if we wish to support // this } else { // so, it's a number of items, Are they all sensor contact wrappers boolean allGood = true; final FixWrapper[] items = new FixWrapper[subjects.length]; for (int i = 0; i < subjects.length; i++) { final Editable editable = subjects[i]; if (editable instanceof FixWrapper) { // hmm, we need to check if this fix is part of a solution. have a // look at the parent FixWrapper fix = (FixWrapper) editable; TrackWrapper track = fix.getTrackWrapper(); SegmentList segments = track.getSegments(); Editable first = segments.getData().iterator().next(); // is this first leg a TMA segment? if (first instanceof CoreTMASegment) { // yes = in which case we won't offer to // generate a track based upon it allGood = false; } else { // cool, stick with it items[i] = (FixWrapper) editable; } } else { allGood = false; break; } // are we good to go? if (allGood) { // cool wrap it in an action. 
_myAction = new Action( "Generate TMA solution from selected positions") { @Override public void run() { // get the supporting data final TMAFromSensorWizard wizard = new TMAFromSensorWizard(45d, new WorldDistance(5, WorldDistance.NM), DEFAULT_TARGET_COURSE, DEFAULT_TARGET_SPEED); final WizardDialog dialog = new WizardDialog(Display.getCurrent() .getActiveShell(), wizard); dialog.create(); dialog.open(); // did it work? if (dialog.getReturnCode() == WizardDialog.OK) { WorldVector res = new WorldVector(0, new WorldDistance(5, WorldDistance.NM), null); double courseDegs = 0; WorldSpeed speed = new WorldSpeed(5, WorldSpeed.Kts); final RangeBearingPage offsetPage = (RangeBearingPage) wizard .getPage(RangeBearingPage.NAME); if (offsetPage != null) { if (offsetPage.isPageComplete()) { res = new WorldVector( MWC.Algorithms.Conversions.Degs2Rads(offsetPage .getBearingDegs()), offsetPage.getRange(), null); } } final EnterSolutionPage solutionPage = (EnterSolutionPage) wizard .getPage(EnterSolutionPage.NAME); if (solutionPage != null) { if (solutionPage.isPageComplete()) { final EnterSolutionPage.SolutionDataItem item = (SolutionDataItem) solutionPage .getEditable(); courseDegs = item.getCourse(); speed = item.getSpeed(); } } // ok, go for it. // sort it out as an operation final IUndoableOperation convertToTrack1 = new TMAfromPositions( items, res, theLayers, courseDegs, speed); // ok, stick it on the buffer runIt(convertToTrack1); } else System.err.println("user cancelled"); } }; } } } // go for it, or not... if (_myAction != null) parent.add(_myAction); } /** * put the operation firer onto the undo history. We've refactored this into a * separate method so testing classes don't have to simulate the CorePlugin * * @param operation */ protected void runIt(final IUndoableOperation operation) { CorePlugin.run(operation); } }
don't offer operation if TMA leg has been selected
org.mwc.debrief.core/src/org/mwc/debrief/core/ContextOperations/GenerateTMASegmentFromOwnshipPositions.java
don't offer operation if TMA leg has been selected
<ide><path>rg.mwc.debrief.core/src/org/mwc/debrief/core/ContextOperations/GenerateTMASegmentFromOwnshipPositions.java <ide> FixWrapper fix = (FixWrapper) editable; <ide> TrackWrapper track = fix.getTrackWrapper(); <ide> SegmentList segments = track.getSegments(); <del> Editable first = segments.getData().iterator().next(); <add> TrackSegment parentSegment = segments.getSegmentFor(fix.getDateTimeGroup().getDate().getTime()); <ide> <ide> // is this first leg a TMA segment? <del> if (first instanceof CoreTMASegment) <add> if (parentSegment instanceof CoreTMASegment) <ide> { <ide> // yes = in which case we won't offer to <ide> // generate a track based upon it
JavaScript
mit
7d0c872b86f1bd387b114461ebdd6e32338a1e51
0
fengyuanchen/viewer
// A shortcut for triggering custom events trigger: function (type, data) { var e = $.Event(type, data); this.$element.trigger(e); return e; }, shown: function () { var options = this.options; this.transitioning = false; this.isFulled = true; this.isShown = true; this.isVisible = true; this.render(); this.bind(); if ($.isFunction(options.shown)) { this.$element.one(EVENT_SHOWN, options.shown); } this.trigger(EVENT_SHOWN); }, hidden: function () { var options = this.options; this.transitioning = false; this.isViewed = false; this.isFulled = false; this.isShown = false; this.isVisible = false; this.unbind(); this.$body.removeClass(CLASS_OPEN); this.$viewer.addClass(CLASS_HIDE); this.resetList(); this.resetImage(); if ($.isFunction(options.hidden)) { this.$element.one(EVENT_HIDDEN, options.hidden); } this.trigger(EVENT_HIDDEN); }, requestFullscreen: function () { var documentElement = document.documentElement; if (this.isFulled && !document.fullscreenElement && !document.mozFullScreenElement && !document.webkitFullscreenElement && !document.msFullscreenElement) { if (documentElement.requestFullscreen) { documentElement.requestFullscreen(); } else if (documentElement.msRequestFullscreen) { documentElement.msRequestFullscreen(); } else if (documentElement.mozRequestFullScreen) { documentElement.mozRequestFullScreen(); } else if (documentElement.webkitRequestFullscreen) { documentElement.webkitRequestFullscreen(Element.ALLOW_KEYBOARD_INPUT); } } }, exitFullscreen: function () { if (this.isFulled) { if (document.exitFullscreen) { document.exitFullscreen(); } else if (document.msExitFullscreen) { document.msExitFullscreen(); } else if (document.mozCancelFullScreen) { document.mozCancelFullScreen(); } else if (document.webkitExitFullscreen) { document.webkitExitFullscreen(); } } }, change: function (event) { var offsetX = this.endX - this.startX; var offsetY = this.endY - this.startY; switch (this.action) { // Move the current image case 'move': this.move(offsetX, offsetY); 
break; // Zoom the current image case 'zoom': this.zoom(function (x1, y1, x2, y2) { var z1 = sqrt(x1 * x1 + y1 * y1); var z2 = sqrt(x2 * x2 + y2 * y2); return (z2 - z1) / z1; }( abs(this.startX - this.startX2), abs(this.startY - this.startY2), abs(this.endX - this.endX2), abs(this.endY - this.endY2) ), false, event); this.startX2 = this.endX2; this.startY2 = this.endY2; break; case 'switch': this.action = 'switched'; if (abs(offsetX) > abs(offsetY)) { if (offsetX > 1) { this.prev(); } else if (offsetX < -1) { this.next(); } } break; // No default } // Override this.startX = this.endX; this.startY = this.endY; }, isSwitchable: function () { var image = this.image; var viewer = this.viewer; return (image.left >= 0 && image.top >= 0 && image.width <= viewer.width && image.height <= viewer.height); } };
src/js/others.js
// A shortcut for triggering custom events trigger: function (type, data) { var e = $.Event(type, data); this.$element.trigger(e); return e; }, shown: function () { var options = this.options; this.transitioning = false; this.isFulled = true; this.isShown = true; this.isVisible = true; this.render(); this.bind(); if ($.isFunction(options.shown)) { this.$element.one(EVENT_SHOWN, options.shown); } this.trigger(EVENT_SHOWN); }, hidden: function () { var options = this.options; this.transitioning = false; this.isViewed = false; this.isFulled = false; this.isShown = false; this.isVisible = false; this.unbind(); this.$body.removeClass(CLASS_OPEN); this.$viewer.addClass(CLASS_HIDE); this.resetList(); this.resetImage(); if ($.isFunction(options.hidden)) { this.$element.one(EVENT_HIDDEN, options.hidden); } this.trigger(EVENT_HIDDEN); }, requestFullscreen: function () { var documentElement = document.documentElement; if (this.isFulled && !document.fullscreenElement && !document.mozFullScreenElement && !document.webkitFullscreenElement && !document.msFullscreenElement) { if (documentElement.requestFullscreen) { documentElement.requestFullscreen(); } else if (documentElement.msRequestFullscreen) { documentElement.msRequestFullscreen(); } else if (documentElement.mozRequestFullScreen) { documentElement.mozRequestFullScreen(); } else if (documentElement.webkitRequestFullscreen) { documentElement.webkitRequestFullscreen(Element.ALLOW_KEYBOARD_INPUT); } } }, exitFullscreen: function () { if (this.isFulled) { if (document.exitFullscreen) { document.exitFullscreen(); } else if (document.msExitFullscreen) { document.msExitFullscreen(); } else if (document.mozCancelFullScreen) { document.mozCancelFullScreen(); } else if (document.webkitExitFullscreen) { document.webkitExitFullscreen(); } } }, change: function (event) { var offsetX = this.endX - this.startX; var offsetY = this.endY - this.startY; switch (this.action) { // Move the current image case 'move': this.move(offsetX, offsetY); 
break; // Zoom the current image case 'zoom': this.zoom(function (x1, y1, x2, y2) { var z1 = sqrt(x1 * x1 + y1 * y1); var z2 = sqrt(x2 * x2 + y2 * y2); return (z2 - z1) / z1; }( abs(this.startX - this.startX2), abs(this.startY - this.startY2), abs(this.endX - this.endX2), abs(this.endY - this.endY2) ), false, event); this.startX2 = this.endX2; this.startY2 = this.endY2; break; case 'switch': this.action = 'switched'; if (offsetX > 1) { this.prev(); } else if (offsetX < -1) { this.next(); } break; // No default } // Override this.startX = this.endX; this.startY = this.endY; }, isSwitchable: function () { var image = this.image; var viewer = this.viewer; return (image.left >= 0 && image.top >= 0 && image.width <= viewer.width && image.height <= viewer.height); } };
Improve image switching
src/js/others.js
Improve image switching
<ide><path>rc/js/others.js <ide> case 'switch': <ide> this.action = 'switched'; <ide> <del> if (offsetX > 1) { <del> this.prev(); <del> } else if (offsetX < -1) { <del> this.next(); <add> if (abs(offsetX) > abs(offsetY)) { <add> if (offsetX > 1) { <add> this.prev(); <add> } else if (offsetX < -1) { <add> this.next(); <add> } <ide> } <ide> <ide> break;
JavaScript
mpl-2.0
0a2e108957d930e5d93c6e90d0d79f2f0cbe1433
0
mozilla/fxa-content-server,ReachingOut/fxa-content-server,TDA/fxa-content-server,riadhchtara/fxa-content-server,TDA/fxa-two-factor-auth,vladikoff/fxa-content-server,riadhchtara/fxa-password-manager,jrgm/fxa-content-server,atiqueahmedziad/fxa-content-server,swatilk/fxa-content-server,dannycoates/fxa-content-server,jpetto/fxa-content-server,chilts/fxa-content-server,vladikoff/fxa-content-server,ReachingOut/fxa-content-server,atiqueahmedziad/fxa-content-server,jpetto/fxa-content-server,shane-tomlinson/fxa-content-server,npestana/fxa-content-server,shane-tomlinson/fxa-content-server,dannycoates/fxa-content-server,ofer43211/fxa-content-server,dannycoates/fxa-content-server,TDA/fxa-content-server,riadhchtara/fxa-content-server,ReachingOut/fxa-content-server,shane-tomlinson/fxa-content-server,swatilk/fxa-content-server,vladikoff/fxa-content-server,mozilla/fxa-content-server,npestana/fxa-content-server,TDA/fxa-two-factor-auth,ofer43211/fxa-content-server,chilts/fxa-content-server,jrgm/fxa-content-server,riadhchtara/fxa-password-manager,mozilla/fxa-content-server,chilts/fxa-content-server,atiqueahmedziad/fxa-content-server,jpetto/fxa-content-server,riadhchtara/fxa-password-manager,swatilk/fxa-content-server,TDA/fxa-two-factor-auth,ofer43211/fxa-content-server,npestana/fxa-content-server,riadhchtara/fxa-content-server,TDA/fxa-content-server
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // Ensure l10n is working as expected based on the // user's `Accept-Language` headers define([ 'intern!object', 'intern/chai!assert', 'intern/dojo/node!../../server/lib/configuration', 'intern/dojo/node!request' ], function (registerSuite, assert, config, request) { 'use strict'; var serverUrl = config.get('public_url'); var suite = { name: 'i18n' }; function testClientJson(acceptLanguageHeader, expectedLanguage) { /*jshint validthis: true*/ var dfd = this.async(1000); var headers = {}; if (acceptLanguageHeader) { headers['Accept-Language'] = acceptLanguageHeader; } request(serverUrl + '/i18n/client.json', { headers: headers }, dfd.callback(function (err, res) { assert.equal(res.statusCode, 200); assert.equal(res.headers['content-type'], 'application/json; charset=utf8'); // Response differs depending on the Accept-Language, let all // intermediaries know this. assert.equal(res.headers.vary, 'accept-language'); var body = JSON.parse(res.body); // yes, body[''] is correct. Language pack meta // info is in the '' field. assert.equal(body[''].language, expectedLanguage); }, dfd.reject.bind(dfd))); } suite['#get /config'] = function () { var dfd = this.async(1000); request(serverUrl + '/config', { headers: { 'Accept-Language': 'es,en;q=0.8,de;q=0.6,en-gb;q=0.4,chrome://global/locale/intl.properties;q=0.2' } }, dfd.callback(function (err, res) { assert.equal(res.statusCode, 200); assert.equal(res.headers['content-type'], 'application/json; charset=utf8'); // Response differs depending on the Accept-Language, let all // intermediaries know this. 
assert.equal(res.headers.vary, 'accept-language'); var body = JSON.parse(res.body); assert.equal(body.language, 'es'); }, dfd.reject.bind(dfd))); }; suite['#get /config should return language not locale'] = function () { var dfd = this.async(1000); request(serverUrl + '/config', { headers: { 'Accept-Language': 'en-us' } }, dfd.callback(function (err, res) { assert.equal(res.statusCode, 200); var body = JSON.parse(res.body); assert.equal(body.language, 'en-us'); }, dfd.reject.bind(dfd))); }; suite['#get /i18n/client.json with multiple supported languages'] = function () { testClientJson.call(this, 'de,en;q=0.8,en;q=0.6,en-gb;q=0.4,chrome://global/locale/intl.properties;q=0.2', 'de'); }; suite['#get /i18n/client.json with lowercase language'] = function () { testClientJson.call(this, 'en-gb', 'en_GB'); }; suite['#get /i18n/client.json with uppercase language'] = function () { testClientJson.call(this, 'EN-gb', 'en_GB'); }; suite['#get /i18n/client.json with uppercase region'] = function () { testClientJson.call(this, 'en-GB', 'en_GB'); }; suite['#get /i18n/client.json all uppercase language'] = function () { testClientJson.call(this, 'EN-GB', 'en_GB'); }; suite['#get /i18n/client.json for language with multiple regions and only language specified'] = function () { testClientJson.call(this, 'es', 'es'); }; suite['#get /i18n/client.json for language with multiple regions and unsupported region specified'] = function () { testClientJson.call(this, 'es-NONEXISTANT', 'es'); }; suite['#get /i18n/client.json with language with two-part region with an unsupported region specified'] = function () { testClientJson.call(this, 'ja-JP-mac', 'ja'); }; suite['#get /i18n/client.json with unsupported language returns default locale'] = function () { testClientJson.call(this, 'no-OP', 'en_US'); }; suite['#get /i18n/client.json with no locale returns default locale'] = function () { testClientJson.call(this, null, 'en_US'); }; registerSuite(suite); });
tests/server/l10n.js
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // Ensure l10n is working as expected based on the // user's `Accept-Language` headers define([ 'intern!object', 'intern/chai!assert', 'intern/dojo/node!../../server/lib/configuration', 'intern/dojo/node!request' ], function (registerSuite, assert, config, request) { 'use strict'; var serverUrl = config.get('public_url'); var suite = { name: 'i18n' }; suite['#get /config'] = function () { var dfd = this.async(1000); request(serverUrl + '/config', { headers: { 'Accept-Language': 'es,en;q=0.8,de;q=0.6,en-gb;q=0.4,chrome://global/locale/intl.properties;q=0.2' } }, dfd.callback(function (err, res) { assert.equal(res.statusCode, 200); assert.equal(res.headers['content-type'], 'application/json; charset=utf8'); // Response differs depending on the Accept-Language, let all // intermediaries know this. assert.equal(res.headers.vary, 'accept-language'); var body = JSON.parse(res.body); assert.equal(body.language, 'es'); }, dfd.reject.bind(dfd))); }; suite['#get /config should return language not locale'] = function () { var dfd = this.async(1000); request(serverUrl + '/config', { headers: { 'Accept-Language': 'en-us' } }, dfd.callback(function (err, res) { assert.equal(res.statusCode, 200); var body = JSON.parse(res.body); assert.equal(body.language, 'en-us'); }, dfd.reject.bind(dfd))); }; suite['#get /i18n/client.json'] = function () { var dfd = this.async(1000); request(serverUrl + '/i18n/client.json', { headers: { 'Accept-Language': 'de,en;q=0.8,en;q=0.6,en-gb;q=0.4,chrome://global/locale/intl.properties;q=0.2' } }, dfd.callback(function (err, res) { assert.equal(res.statusCode, 200); assert.equal(res.headers['content-type'], 'application/json; charset=utf8'); // Response differs depending on the Accept-Language, let all // intermediaries know this. 
assert.equal(res.headers.vary, 'accept-language'); var body = JSON.parse(res.body); // yes, body[''] is correct. Language pack meta // info is in the '' field. assert.equal(body[''].language, 'de'); }, dfd.reject.bind(dfd))); }; suite['#get /i18n/client.json with lowercase language'] = function () { var dfd = this.async(1000); request(serverUrl + '/i18n/client.json', { headers: { 'Accept-Language': 'en-gb' } }, dfd.callback(function (err, res) { assert.equal(res.statusCode, 200); assert.equal(res.headers['content-type'], 'application/json; charset=utf8'); var body = JSON.parse(res.body); assert.equal(body[''].language, 'en_GB'); }, dfd.reject.bind(dfd))); }; suite['#get /i18n/client.json with unsupported locale'] = function () { var dfd = this.async(1000); request(serverUrl + '/i18n/client.json', { headers: { 'Accept-Language': 'no-OP' } }, dfd.callback(function (err, res) { assert.equal(res.statusCode, 200); assert.equal(res.headers['content-type'], 'application/json; charset=utf8'); var body = JSON.parse(res.body); assert.equal(body[''].language, 'en_US'); }, dfd.reject.bind(dfd))); }; registerSuite(suite); });
chore(tests): Add tests to ensure the correct client.json is returned under a variety of circumstances. * all uppercase language * all lowercase language * uppercase region * uppercase language * multiple regions supported, only language specified * unsupported region returns root language * no Accept-Language header sent * Three part locale issue #979
tests/server/l10n.js
chore(tests): Add tests to ensure the correct client.json is returned under a variety of circumstances.
<ide><path>ests/server/l10n.js <ide> var suite = { <ide> name: 'i18n' <ide> }; <add> <add> function testClientJson(acceptLanguageHeader, expectedLanguage) { <add> /*jshint validthis: true*/ <add> var dfd = this.async(1000); <add> <add> var headers = {}; <add> if (acceptLanguageHeader) { <add> headers['Accept-Language'] = acceptLanguageHeader; <add> } <add> <add> request(serverUrl + '/i18n/client.json', { <add> headers: headers <add> }, dfd.callback(function (err, res) { <add> assert.equal(res.statusCode, 200); <add> assert.equal(res.headers['content-type'], 'application/json; charset=utf8'); <add> // Response differs depending on the Accept-Language, let all <add> // intermediaries know this. <add> assert.equal(res.headers.vary, 'accept-language'); <add> <add> var body = JSON.parse(res.body); <add> <add> // yes, body[''] is correct. Language pack meta <add> // info is in the '' field. <add> assert.equal(body[''].language, expectedLanguage); <add> }, dfd.reject.bind(dfd))); <add> } <ide> <ide> suite['#get /config'] = function () { <ide> var dfd = this.async(1000); <ide> }, dfd.reject.bind(dfd))); <ide> }; <ide> <del> suite['#get /i18n/client.json'] = function () { <del> var dfd = this.async(1000); <del> <del> request(serverUrl + '/i18n/client.json', { <del> headers: { <del> 'Accept-Language': 'de,en;q=0.8,en;q=0.6,en-gb;q=0.4,chrome://global/locale/intl.properties;q=0.2' <del> } <del> }, dfd.callback(function (err, res) { <del> assert.equal(res.statusCode, 200); <del> assert.equal(res.headers['content-type'], 'application/json; charset=utf8'); <del> // Response differs depending on the Accept-Language, let all <del> // intermediaries know this. <del> assert.equal(res.headers.vary, 'accept-language'); <del> <del> var body = JSON.parse(res.body); <del> // yes, body[''] is correct. Language pack meta <del> // info is in the '' field. 
<del> assert.equal(body[''].language, 'de'); <del> }, dfd.reject.bind(dfd))); <add> suite['#get /i18n/client.json with multiple supported languages'] = function () { <add> testClientJson.call(this, <add> 'de,en;q=0.8,en;q=0.6,en-gb;q=0.4,chrome://global/locale/intl.properties;q=0.2', <add> 'de'); <ide> }; <ide> <ide> suite['#get /i18n/client.json with lowercase language'] = function () { <del> var dfd = this.async(1000); <del> <del> request(serverUrl + '/i18n/client.json', { <del> headers: { <del> 'Accept-Language': 'en-gb' <del> } <del> }, dfd.callback(function (err, res) { <del> assert.equal(res.statusCode, 200); <del> assert.equal(res.headers['content-type'], 'application/json; charset=utf8'); <del> var body = JSON.parse(res.body); <del> <del> assert.equal(body[''].language, 'en_GB'); <del> }, dfd.reject.bind(dfd))); <add> testClientJson.call(this, 'en-gb', 'en_GB'); <ide> }; <ide> <del> suite['#get /i18n/client.json with unsupported locale'] = function () { <del> var dfd = this.async(1000); <add> suite['#get /i18n/client.json with uppercase language'] = function () { <add> testClientJson.call(this, 'EN-gb', 'en_GB'); <add> }; <ide> <del> request(serverUrl + '/i18n/client.json', { <del> headers: { <del> 'Accept-Language': 'no-OP' <del> } <del> }, dfd.callback(function (err, res) { <del> assert.equal(res.statusCode, 200); <del> assert.equal(res.headers['content-type'], 'application/json; charset=utf8'); <del> var body = JSON.parse(res.body); <add> suite['#get /i18n/client.json with uppercase region'] = function () { <add> testClientJson.call(this, 'en-GB', 'en_GB'); <add> }; <ide> <del> assert.equal(body[''].language, 'en_US'); <del> }, dfd.reject.bind(dfd))); <add> suite['#get /i18n/client.json all uppercase language'] = function () { <add> testClientJson.call(this, 'EN-GB', 'en_GB'); <add> }; <add> <add> suite['#get /i18n/client.json for language with multiple regions and only language specified'] = function () { <add> testClientJson.call(this, 'es', 'es'); 
<add> }; <add> <add> suite['#get /i18n/client.json for language with multiple regions and unsupported region specified'] = function () { <add> testClientJson.call(this, 'es-NONEXISTANT', 'es'); <add> }; <add> <add> suite['#get /i18n/client.json with language with two-part region with an unsupported region specified'] = function () { <add> testClientJson.call(this, 'ja-JP-mac', 'ja'); <add> }; <add> <add> suite['#get /i18n/client.json with unsupported language returns default locale'] = function () { <add> testClientJson.call(this, 'no-OP', 'en_US'); <add> }; <add> <add> suite['#get /i18n/client.json with no locale returns default locale'] = function () { <add> testClientJson.call(this, null, 'en_US'); <ide> }; <ide> <ide> registerSuite(suite);
Java
apache-2.0
55990fa775fe110a913e7e3590030b41e15f6db5
0
GlenKPeterson/UncleJim,GlenKPeterson/UncleJim
// Copyright 2016-05-28 PlanBase Inc. & Glen Peterson // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.organicdesign.fp.experimental; import org.organicdesign.fp.collections.ImList; import org.organicdesign.fp.collections.UnmodSortedIterable; import org.organicdesign.fp.tuple.Tuple2; import java.lang.reflect.Array; import java.util.Arrays; import java.util.List; import static org.organicdesign.fp.StaticImports.tup; /** This is an experiment - DO NOT USE except to test. This is based on the paper, "RRB-Trees: Efficient Immutable Vectors" by Phil Bagwell and Tiark Rompf. With some background from the Cormen, Leiserson, Rivest & Stein Algorithms book entry on B-Trees. Also with an awareness of the Clojure PersistentVector by Rich Hickey. All errors are by Glen Peterson. Priorities: append(item) get(index) insert(item, index) */ public class RrbTree1<E> implements ImList<E> { // Definitions: // Strict: Short for "Strict Radix: meaning that all sub-nodes are a uniform width of exactly RADIX_NODE_LENGTH // (Use a power of 2 to take advantage of bit shifting which is a key performance reason for the uniform // width). Strict nodes have leaf widths of exactly RADIX_NODE_LENGTH and are left-filled and packed up to // the last full node. // Relaxed: In this case refers to "Relaxed Radix" which means the nodes are of somewhat varying sizes. The sizes // range from MIN_NODE_LENGTH (Cormen et al calls this "Minimum Degree") to MAX_NODE_LENGTH. 
// There's bit shifting going on here because it's a very fast operation. // Shifting right by 5 is eons faster than dividing by 32. // TODO: Change to 5. private static final int NODE_LENGTH_POW_2 = 2; // 2 for testing now, 5 for real later. private static final int RADIX_NODE_LENGTH = 1 << NODE_LENGTH_POW_2;// 0b00000000000000000000000000100000 = 0x20 = 32 // (MIN_NODE_LENGTH + MAX_NODE_LENGTH) / 2 should equal RADIX_NODE_LENGTH so that they have the same average node // size to make the index guessing easier. private static final int MIN_NODE_LENGTH = (RADIX_NODE_LENGTH+1) * 2 / 3; private static final int MAX_NODE_LENGTH = ( (RADIX_NODE_LENGTH+1) * 4 / 3) - 1; // In the PersistentVector, this is called the tail, but here it can be at // Other areas of the tree besides the tail. // private E[] focus; // All the tree nodes from the root to the block in focus. // private Node<E>[] display; // =================================== Array Helper Functions ================================== // We only one empty array and it makes the code simpler than pointing to null all the time. // Have to time the difference between using this and null. The only difference I can imagine // is that this has an address in memory and null does not, so it could save a memory lookup // in some places. private static final Object[] EMPTY_ARRAY = new Object[0]; // Helper function to avoid type warnings. @SuppressWarnings("unchecked") private static <T> T[] emptyArray() { return (T[]) EMPTY_ARRAY; } // Helper function to avoid type warnings. @SuppressWarnings("unchecked") private static <T> T[] singleElementArray(T elem) { return (T[]) new Object[] { elem }; } @SuppressWarnings("unchecked") private static <T> T[] insertIntoArrayAt(T item, T[] items, int idx, Class<T> tClass) { // Make an array that's one bigger. It's too bad that the JVM bothers to // initialize this with nulls. T[] newItems = (T[]) ( (tClass == null) ? 
new Object[items.length + 1] : Array.newInstance(tClass, items.length + 1) ); // If we aren't inserting at the first item, array-copy the items before the insert // point. if (idx > 0) { System.arraycopy(items, 0, newItems, 0, idx); } // Insert the new item. newItems[idx] = item; // If we aren't inserting at the last item, array-copy the items after the insert // point. if (idx < items.length) { System.arraycopy(items, idx, newItems, idx + 1, items.length - idx); } return newItems; } private static <T> T[] insertIntoArrayAt(T item, T[] items, int idx) { return insertIntoArrayAt(item, items, idx, null); } @SuppressWarnings("unchecked") private static <T> T[] spliceIntoArrayAt(T[] insertedItems, T[] origItems, int idx, Class<T> tClass) { // Make an array that big enough. It's too bad that the JVM bothers to // initialize this with nulls. T[] newItems = (T[]) Array.newInstance(tClass, insertedItems.length + origItems.length); // If we aren't inserting at the first item, array-copy the items before the insert // point. if (idx > 0) { System.arraycopy(origItems, 0, newItems, 0, idx); } // Insert the new items System.arraycopy(insertedItems, 0, newItems, idx, insertedItems.length); // If we aren't inserting at the last item, array-copy the items after the insert // point. if (idx < origItems.length) { System.arraycopy(origItems, idx, newItems, idx + insertedItems.length, origItems.length - idx); } return newItems; } @SuppressWarnings("unchecked") private static <T> T[] replaceInArrayAt(T replacedItem, T[] origItems, int idx, Class<T> tClass) { // Make an array that big enough. It's too bad that the JVM bothers to // initialize this with nulls. T[] newItems = (T[]) ( (tClass == null) ? 
new Object[origItems.length] : Array.newInstance(tClass, origItems.length) ); System.arraycopy(origItems, 0, newItems, 0, origItems.length); newItems[idx] = replacedItem; return newItems; } private static <T> T[] replaceInArrayAt(T replacedItem, T[] origItems, int idx) { return replaceInArrayAt(replacedItem, origItems, idx, null); } private static RrbTree1 EMPTY_RRB_TREE = new RrbTree1<>(emptyArray(), 0, Leaf.emptyLeaf(), 0); /** This is the public factory method. @return the empty RRB-Tree (there is only one) */ @SuppressWarnings("unchecked") public static <T> RrbTree1<T> empty() { return (RrbTree1<T>) EMPTY_RRB_TREE; } // Focus is like the tail in Rich Hickey's Persistent Vector, but named after the structure // in Scala's implementation. Tail and focus are both designed to allow repeated appends or // inserts to the same area of a vector to be done in constant time. Tail only handles appends // but this can handle repeated inserts to any area of a vector. private final E[] focus; private final int focusStartIndex; private final Node<E> root; private final int size; // Constructor private RrbTree1(E[] f, int fi, Node<E> r, int s) { focus = f; focusStartIndex = fi; root = r; size = s; } @Override public int size() { return size; } @Override public boolean equals(Object other) { if (this == other) { return true; } if ( !(other instanceof List) ) { return false; } List that = (List) other; return (this.size() == that.size()) && UnmodSortedIterable.equals(this, UnmodSortedIterable.castFromList(that)); } /** This is correct, but O(n). This implementation is compatible with java.util.AbstractList. 
*/ @Override public int hashCode() { int ret = 1; for (E item : this) { ret *= 31; if (item != null) { ret += item.hashCode(); } } return ret; } @Override public E get(int i) { // System.out.println(" get(" + i + ")"); if ( (i < 0) || (i > size) ) { throw new IndexOutOfBoundsException("Index: " + i + " size: " + size); } if ( (focusStartIndex < 0) || (focusStartIndex > size) ) { throw new IllegalStateException("focusStartIndex: " + focusStartIndex + " size: " + size); } if (i >= focusStartIndex) { // System.out.println(" i>=focusStartIndex: " + focusStartIndex); int focusOffset = i - focusStartIndex; if (focusOffset < focus.length) { return focus[focusOffset]; } i -= focus.length; } // System.out.println(" focusStartIndex: " + focusStartIndex); // System.out.println(" focus.length: " + focus.length); // System.out.println(" adjusted index: " + i); return root.get(i); } /** Adds an item at the end of this structure. This is the most efficient way to build an RRB Tree as it conforms to the Clojure PersistentVector and all of its optimizations. @param t the item to append @return a new RRB-Tree with the item appended. */ @Override public RrbTree1<E> append(E t) { // System.out.println("=== append(" + t + ") ==="); // If our focus isn't set up for appends or if it's full, insert it into the data structure // where it belongs. Then make a new focus if ( ( (focusStartIndex < root.maxIndex()) && (focus.length > 0) ) || (focus.length >= RADIX_NODE_LENGTH) ) { // TODO: Does focusStartIndex only work for the root node, or is it translated as it goes down? Node<E> newRoot = root.pushFocus(focusStartIndex, focus); E[] newFocus = singleElementArray(t); return new RrbTree1<>(newFocus, size, newRoot, size + 1); } E[] newFocus = insertIntoArrayAt(t, focus, focus.length); return new RrbTree1<>(newFocus, focusStartIndex, root, size + 1); } /** I would have called this insert and reversed the order or parameters. 
@param idx the insertion point @param element the item to insert @return a new RRB-Tree with the item inserted. */ public RrbTree1<E> insert(int idx, E element) { System.out.println("insert(int " + idx + ", E " + element + ")"); // If the focus is full, push it into the tree and make a new one with the new element. if (focus.length >= RADIX_NODE_LENGTH) { Node<E> newRoot = root.pushFocus(focusStartIndex, focus); E[] newFocus = singleElementArray(element); return new RrbTree1<>(newFocus, idx, newRoot, size + 1); } // If the index is within the focus, add the item there. int diff = idx - focusStartIndex; System.out.println("diff: " + diff); if ( (diff >= 0) && (diff <= focus.length) ) { System.out.println("new focus..."); E[] newFocus = insertIntoArrayAt(element, focus, diff); return new RrbTree1<>(newFocus, focusStartIndex, root, size + 1); } System.out.println("insert somewhere else than the current focus."); System.out.println("focusStartIndex: " + focusStartIndex); System.out.println("focus: " + Arrays.toString(focus)); // Here we are left with an insert somewhere else than the current focus. Node<E> newRoot = focus.length > 0 ? root.pushFocus(focusStartIndex, focus) : root; E[] newFocus = singleElementArray(element); return new RrbTree1<>(newFocus, idx, newRoot, size + 1); } /** Replace the item at the given index. Note: i.replace(i.size(), o) used to be equivalent to i.concat(o), but it probably won't be for the RRB tree implementation, so this will change too. @param i the index where the value should be stored. 
@param t the value to store @return a new RrbTree1 with the replaced item */ @Override public RrbTree1<E> replace(int i, E t) { if ( (i < 0) || (i > size) ) { throw new IndexOutOfBoundsException("Index: " + i + " size: " + size); } if (i >= focusStartIndex) { int focusOffset = i - focusStartIndex; if (focusOffset < focus.length) { return new RrbTree1<>(replaceInArrayAt(t, focus, focusOffset), focusStartIndex, root, size); } i -= focus.length; } return new RrbTree1<>(focus, focusStartIndex, root.replace(i, t), size); } @Override public String toString() { return "RrbTree(fsi=" + focusStartIndex + " focus=" + Arrays.toString(focus) + "\n root=" + root + ")"; } private interface Node<T> { /** Return the item at the given index */ T get(int i); /** Highest index returnable by this node */ int maxIndex(); /** Returns true if this node's array is not full */ boolean thisNodeHasCapacity(); /** Returns true if this strict-Radix tree can take another 32 items. */ boolean hasStrictCapacity(); /** Can we put focus at the given index without reshuffling nodes? @param index the index we want to insert at @param size the number of items to insert. Must be MIN_NODE_LENGTH <= size <= MAX_NODE_LENGTH @return true if we can do so without otherwise adjusting the tree. */ boolean hasRelaxedCapacity(int index, int size); Tuple2<Node<T>,Node<T>> split(); // Because we want to append/insert into the focus as much as possible, we will treat // the insert or append of a single item as a degenerate case. 
Instead, the primary way // to add to the internal data structure will be to push the entire focus array into it Node<T> pushFocus(int index, T[] oldFocus); Node<T> replace(int idx, T t); } private static class Leaf<T> implements Node<T> { private static final Leaf EMPTY_LEAF = new Leaf<>(EMPTY_ARRAY); @SuppressWarnings("unchecked") private static final <T> Leaf<T> emptyLeaf() { return (Leaf<T>) EMPTY_LEAF; } final T[] items; // It can only be Strict if items.length == RADIX_NODE_LENGTH and if its parents // are strict. // boolean isStrict; Leaf(T[] ts) { items = ts; } @Override public T get(int i) { return items[i]; } @Override public int maxIndex() { return items.length; } // If we want to add one more to an existing leaf node, it must already be part of a // relaxed tree. @Override public boolean thisNodeHasCapacity() { return items.length < MAX_NODE_LENGTH; } @Override public boolean hasStrictCapacity() { return false; } @Override public boolean hasRelaxedCapacity(int index, int size) { // Appends and prepends need to be a good size, but random inserts do not. // if ( (size < MIN_NODE_LENGTH) || (size > MAX_NODE_LENGTH) ) { // throw new IllegalArgumentException("Bad size: " + size); // // + " MIN_NODE_LENGTH=" + MIN_NODE_LENGTH + " MAX_NODE_LENGTH=" + MAX_NODE_LENGTH); // } return (items.length + size) < MAX_NODE_LENGTH; } /** This is a Relaxed operation. Performing it on a Strict node causes it and all ancestors to become Relaxed Radix. The parent should only split when size < MIN_NODE_LENGTH during a slice operation. @return Two new nodes. */ @Override public Tuple2<Node<T>,Node<T>> split() { throw new UnsupportedOperationException("Not Implemented Yet"); // System.out.println("Leaf.splitAt(" + i + ")"); // // TODO: if we split for an insert-when-full, one side of the split should be bigger in preparation for the insert. 
// if (i == 0) { // return tup(emptyLeaf(), this); // } // if (i == items.length) { // // Not sure this can possibly be called, but just in case... // return tup(this, emptyLeaf()); // } // // return tup(new Leaf<>(Arrays.copyOf(items, i)), // new Leaf<>(Arrays.copyOfRange(items, i, items.length - i))); } // I think this can only be called when the root node is a leaf. @SuppressWarnings("unchecked") @Override public Node<T> pushFocus(int index, T[] oldFocus) { if (oldFocus.length == 0) { throw new IllegalStateException("Never call this with an empty focus!"); } // We put the empty Leaf as the root of the empty vector and it stays there // until the first call to this method, at which point, the oldFocus becomes the // new root. if (items.length == 0) { return new Leaf<>(oldFocus); } // Try first to yield a Strict node. For a leaf like this, that means both this node and the pushed // focus are RADIX_NODE_LENGTH. It also means the old focus is being pushed at either the beginning or // the end of this node (not anywhere in-between). if ( (items.length == RADIX_NODE_LENGTH) && (oldFocus.length == RADIX_NODE_LENGTH) && ((index == RADIX_NODE_LENGTH) || (index == 0)) ) { Leaf<T>[] newNodes = (index == RADIX_NODE_LENGTH) ? new Leaf[] { this, new Leaf<>(oldFocus)} : new Leaf[] { new Leaf<>(oldFocus), this }; return new Strict<>(NODE_LENGTH_POW_2, newNodes); } if ((items.length + oldFocus.length) < MAX_NODE_LENGTH) { return new Leaf<>(spliceIntoArrayAt(oldFocus, items, index, (Class<T>) items[0].getClass())); } // We should only get here when the root node is a leaf. // Maybe we should be more circumspect with our array creation, but for now, just jam // jam it into one big array, then split it up for simplicity // TODO: Consider optimizing: T[] newItems = spliceIntoArrayAt(oldFocus, items, index, (Class<T>) items[0].getClass()); System.out.println(" newItems: " + Arrays.toString(newItems)); // Shift right one is divide-by 2. 
int splitPoint = newItems.length >> 1; System.out.println(" splitPoint: " + splitPoint); T[] left = (T[]) new Object[splitPoint]; T[] right = (T[]) new Object[newItems.length - splitPoint]; // original array, offset, newArray, offset, length System.arraycopy(newItems, 0, left, 0, splitPoint); System.out.println(" left: " + Arrays.toString(left)); System.arraycopy(newItems, splitPoint, right, 0, right.length); System.out.println(" right: " + Arrays.toString(right)); Arrays.copyOf(newItems, splitPoint); Leaf<T> leftLeaf = new Leaf<>(left); Leaf<T> rightLeaf = new Leaf<>(right); // TODO: Could calculate the maxIndex values Relaxed<T> newRelaxed = new Relaxed<>(new int[] { leftLeaf.maxIndex(), leftLeaf.maxIndex() + rightLeaf.maxIndex() }, new Leaf[] { leftLeaf, rightLeaf }); System.out.println(" newRelaxed: " + newRelaxed); return newRelaxed; // System.out.println("pushFocus(" + index + ", " + Arrays.toString(oldFocus) + ")"); // System.out.println(" items: " + Arrays.toString(items)); // System.out.println(" oldFocus: " + Arrays.toString(oldFocus)); // // If we there is room for the entire focus to fit into this node, just stick it in // // there! // if ( (items.length + oldFocus.length) < MAX_NODE_LENGTH ) { // return new Leaf<>(spliceIntoArrayAt(oldFocus, items, index)); // } // Ugh, we have to chop it across 2 arrays. // throw new UnsupportedOperationException("Not implemented yet!"); } @Override public Node<T> replace(int idx, T t) { return new Leaf<>(replaceInArrayAt(t, items, idx)); } // @Override public Leaf<T> insert(int i, T item) { if (!thisNodeHasCapacity()) { throw new IllegalStateException("Called insert, but can't add one more!" + " Parent should have called split first."); } // Return our new node. return new Leaf<>(insertIntoArrayAt(item, items, i)); } @Override public String toString() { // return "Leaf("+ Arrays.toString(items) + ")"; return Arrays.toString(items); } } // end class Leaf // Contains a left-packed tree of exactly 32-item nodes. 
private static class Strict<T> implements Node<T> { // This is the number of levels below this node (height) times NODE_LENGTH // For speed, we calculate it as height << NODE_LENGTH_POW_2 // TODO: Can we store shift at the top-level Strict only? final int shift; // These are the child nodes final Node<T>[] nodes; // Constructor Strict(int s, Node<T>[] ns) { shift = s; nodes = ns; // System.out.println(" new Strict" + shift + Arrays.toString(ns)); } /** Returns the high bits which we use to index into our array. This is the simplicity (and speed) of Strict indexing. When everything works, this can be inlined for performance. This could maybe yield a good guess for Relaxed nodes? */ private int highBits(int i) { return i >> shift; } /** Returns the low bits of the index (the part Strict sub-nodes need to know about). This helps make this data structure simple and fast. When everything works, this can be inlined for performance. DO NOT use this for Relaxed nodes - they use subtraction instead! */ private int lowBits(int i) { int shifter = -1 << shift; // System.out.println(" shifter (binary): " + Integer.toBinaryString(shift)); int invShifter = ~shifter; // System.out.println(" invShifter (binary): " + Integer.toBinaryString(invShifter)); // System.out.println(" i (binary): " + Integer.toBinaryString(invShifter)); return i & invShifter; // System.out.println(" subNodeIdx (binary): " + Integer.toBinaryString(subNodeIdx)); // System.out.println(" subNodeIdx: " + subNodeIdx); } @Override public T get(int i) { // System.out.println(" Strict.get(" + i + ")"); // Find the node indexed by the high bits (for this height). // Send the low bits on to our sub-nodes. 
return nodes[highBits(i)].get(lowBits(i)); } @Override public int maxIndex() { int lastNodeIdx = nodes.length - 1; // System.out.println(" Strict.maxIndex()"); // System.out.println(" nodes.length:" + nodes.length); // System.out.println(" shift:" + shift); // System.out.println(" RADIX_NODE_LENGTH:" + RADIX_NODE_LENGTH); // Add up all the full nodes (only the last can be partial) int shiftedLength = lastNodeIdx << shift; // System.out.println(" shifed length:" + shiftedLength); int partialNodeSize = nodes[lastNodeIdx].maxIndex(); // System.out.println(" Remainder:" + partialNodeSize); return shiftedLength + partialNodeSize; } @Override public boolean thisNodeHasCapacity() { return nodes.length < RADIX_NODE_LENGTH; } @Override public boolean hasStrictCapacity() { return thisNodeHasCapacity() || nodes[nodes.length - 1].hasStrictCapacity(); } @Override public boolean hasRelaxedCapacity(int index, int size) { if ( (size < MIN_NODE_LENGTH) || (size > MAX_NODE_LENGTH) ) { throw new IllegalArgumentException("Bad size: " + size); } // TODO: Very unsure about this implementation! // return highBits(index) == nodes.length - 1; // It has relaxed capacity because a Relaxed node could have up to MAX_NODE_LENGTH nodes and by definition // this Strict node has no more than RADIX_NODE_LENGTH items. return true; } @Override public Tuple2<Node<T>,Node<T>> split() { // System.out.println("Strict.splitAt(" + i + ")"); // TODO: Implement throw new UnsupportedOperationException("Not implemented yet"); } @SuppressWarnings("unchecked") @Override public Node<T> pushFocus(int index, T[] oldFocus) { // System.out.println("Strict pushFocus(" + Arrays.toString(oldFocus) + ", " + index + ")"); // System.out.println(" this: " + this); // If the proper sub-node can take the additional array, let it! 
int subNodeIndex = highBits(index);
// System.out.println(" subNodeIndex: " + subNodeIndex);

// It's a strict-compatible addition if the focus being pushed is of
// RADIX_NODE_LENGTH and the index it's pushed to falls on the final leaf-node boundary
// and the children of this node are leaves and this node is not full.
if (oldFocus.length == RADIX_NODE_LENGTH) {
    if (index == maxIndex()) {
        Node<T> lastNode = nodes[nodes.length - 1];
        if (lastNode.hasStrictCapacity()) {
            // Push the focus down to a lower-level node with capacity, then swap the
            // updated child into a copy of our child array.
            Node<T> newNode = lastNode.pushFocus(lowBits(index), oldFocus);
            Node<T>[] newNodes = replaceInArrayAt(newNode, nodes, nodes.length - 1, Node.class);
            return new Strict<>(shift, newNodes);
        }
        // Regardless of what else happens, we're going to add a new node.
        Node<T> newNode = new Leaf<>(oldFocus);

        // Make a skinny branch of a tree by walking up from the leaf node until our
        // new branch is at the same level as the old one.  We have to build evenly
        // (like hotels in Monopoly) in order to keep the tree balanced.  Even height,
        // but left-packed (the lower indices must all be filled before adding new
        // nodes to the right).
        int newShift = NODE_LENGTH_POW_2;

        // If we've got space in our array, we just have to add skinny-branch nodes up to
        // the level below ours.  But if we don't have space, we have to add a
        // single-element strict node at the same level as ours here too.
        int maxShift = (nodes.length < RADIX_NODE_LENGTH) ? shift : shift + 1;

        // Make the skinny-branch of single-element strict nodes:
        while (newShift < maxShift) {
            // System.out.println(" Adding a skinny branch node...");
            Node<T>[] newNodes = (Node<T>[]) Array.newInstance(newNode.getClass(), 1);
            newNodes[0] = newNode;
            newNode = new Strict<>(newShift, newNodes);
            newShift += NODE_LENGTH_POW_2;
        }

        if ((nodes.length < RADIX_NODE_LENGTH)) {
            // System.out.println(" Adding a node to the existing array");
            Node<T>[] newNodes = (Node<T>[]) insertIntoArrayAt(newNode, nodes, subNodeIndex, Node.class);
            // This could allow cheap strict inserts on any leaf-node boundary...
            return new Strict<>(shift, newNodes);
        } else {
            // System.out.println(" Adding a level to the Strict tree");
            return new Strict(shift + NODE_LENGTH_POW_2,
                              new Node[]{this, newNode});
        }
    } else if ( (shift == NODE_LENGTH_POW_2) &&
                (lowBits(index) == 0) &&
                (nodes.length < RADIX_NODE_LENGTH) ) {
        // Here we are:
        //    Pushing a RADIX_NODE_LENGTH focus
        //    At the level above the leaf nodes
        //    Inserting *between* existing leaf nodes (or before or after)
        //    Have room for at least one more leaf child
        // That makes it free and legal to insert a new RADIX_NODE_LENGTH leaf node and still
        // yield a Strict (as opposed to Relaxed).

        // Regardless of what else happens, we're going to add a new node.
        Node<T> newNode = new Leaf<>(oldFocus);

        Node<T>[] newNodes = (Node<T>[]) insertIntoArrayAt(newNode, nodes, subNodeIndex, Node.class);
        // This allows cheap strict inserts on any leaf-node boundary...
        return new Strict<>(shift, newNodes);
    }
} // end if oldFocus.length == RADIX_NODE_LENGTH

// Here we're going to yield a Relaxed Radix node, so punt to that (slower) logic.
// System.out.println("Yield a Relaxed node.");
// Build the cumulative end-index table a Relaxed node needs: each full Strict sub-node
// holds exactly (1 << shift) items.
int[] endIndices = new int[nodes.length];
for (int i = 0; i < endIndices.length; i++) {
    endIndices[i] = (i + 1) << shift;
}
// System.out.println("End indices: " + Arrays.toString(endIndices));
return new Relaxed<>(endIndices, nodes).pushFocus(index, oldFocus);
}

/** Returns a copy of this node (persistent update) with the item at idx replaced by t. */
@SuppressWarnings("unchecked")
@Override public Node<T> replace(int idx, T t) {
    // System.out.println("  Strict.get(" + i + ")");
    // Find the node indexed by the high bits (for this height).
    // Send the low bits on to our sub-nodes.
    int thisNodeIdx = highBits(idx);
    Node<T> newNode = nodes[thisNodeIdx].replace(lowBits(idx), t);
    return new Strict<>(shift, replaceInArrayAt(newNode, nodes, thisNodeIdx, Node.class));
}

//    @Override public Tuple2<Strict<T>,Strict<T>> split() {
//        Strict<T> right = new Strict<T>(shift, new Strict[0]);
//        return tup(this, right);
//    }

@Override public String toString() {
//        return "Strict(nodes.length="+ nodes.length + ", shift=" + shift + ")";
    return "Strict" + shift + Arrays.toString(nodes);
}
}

// Contains a relaxed tree of nodes that average around 32 items each.
private static class Relaxed<T> implements Node<T> {

    // The max index stored in each sub-node.  This is a separate array so it can be retrieved
    // in a single memory fetch.  Note that this is a 1-based index, or really a count, not a
    // normal zero-based index.
    final int[] endIndices;
    // The sub nodes
    final Node<T>[] nodes;

    // Constructor.  Validates that endIndices is the running cumulative item count of nodes.
    Relaxed(int[] is, Node<T>[] ns) {
        endIndices = is;
        nodes = ns;

        // TODO: These are constraint validations to prevent implementation bugs - remove before shipping.
if (endIndices.length < 1) {
    throw new IllegalArgumentException("endIndices.length < 1");
}
if (nodes.length < 1) {
    throw new IllegalArgumentException("nodes.length < 1");
}
if (endIndices.length != nodes.length) {
    throw new IllegalArgumentException("endIndices.length:" + endIndices.length +
                                       " != nodes.length:" + nodes.length);
}
// Each endIndices[i] must equal the cumulative item count through nodes[i].
int endIdx = 0;
for (int i = 0; i < nodes.length; i++) {
    endIdx += nodes[i].maxIndex();
    if (endIdx != endIndices[i]) {
        throw new IllegalArgumentException("nodes[" + i + "].maxIndex() was " +
                                           nodes[i].maxIndex() +
                                           " which is not compatable with endIndices[" +
                                           i + "] which was " + endIndices[i] +
                                           "\n" + " endIndices: " + Arrays.toString(endIndices) +
                                           " nodes: " + Arrays.toString(nodes));
    }
}
}

@Override public int maxIndex() { return endIndices[endIndices.length - 1]; }

/** Converts the index of an item into the index of the sub-node containing that item.
 @param index The index of the item in the entire tree
 @return The index of the branch of the tree (the sub-node and its ancestors) the item
 resides in.
 */
private int subNodeIndex(int index) {
    // Index range: 0 to maxIndex()
    // Result Range: 0 to startIndices.length
    // liner interpolation: index/maxIndex() = result/startIndices.length
    // result = index * startIndices.length / maxIndex();

    //        int guess = index * startIndices.length / maxIndex();
    //        int guessedItem = startIndices[guess];
    //        while (guessedItem > (index + MIN_NODE_LENGTH)) {
    //            guessedItem = startIndices[--guess];
    //        }
    //        while (guessedItem < index) {
    //            guessedItem = startIndices[++guess];
    //        }

    // TODO: This is really slow.  Do linear interpolation instead.
    // Linear scan: the first endIndices entry strictly greater than index names the branch.
    for (int i = 0; i < endIndices.length; i++) {
        if (index < endIndices[i]) {
            return i;
        }
    }
    // For an append just one element beyond the end of the existing data structure,
    // just try to add it to the last node.  This might seem overly permissive to accept
    // these as inserts or appends without differentiating between the two, but it flows
    // naturally with this data structure and I think makes it easier to use without
    // encouraging user programming errors.
    // Hopefully this still leads to a relatively balanced tree...
    if (index == endIndices[endIndices.length - 1]) {
        return endIndices.length - 1;
    }
    throw new IllegalStateException("Should be unreachable! index: " + index +
                                    " this: " + this.toString());
}

/** Converts the index of an item into the index to pass to the sub-node containing
 that item.
 @param index The index of the item in the entire tree
 @param subNodeIndex the index into this node's array of sub-nodes.
 @return The index to pass to the sub-branch the item resides in
 */
// Since endIndices is cumulative, subtracting the previous branch's end translates a
// whole-tree index into a branch-local index.
private int subNodeAdjustedIndex(int index, int subNodeIndex) {
    return (subNodeIndex == 0) ? index
                               : index - endIndices[subNodeIndex - 1];
}

@Override public T get(int index) {
    // System.out.println("  Relaxed.get(" + index + ")");
    int subNodeIndex = subNodeIndex(index);
    // System.out.println("  subNodeIndex: " + subNodeIndex);
    // System.out.println("  subNodeAdjustedIndex(index, subNodeIndex): " +
    //                    subNodeAdjustedIndex(index, subNodeIndex));
    return nodes[subNodeIndex].get(subNodeAdjustedIndex(index, subNodeIndex));
}

/** Splits this node in half at the midpoint of its sub-node array (not at the midpoint
 of the items it contains). */
@Override public Tuple2<Node<T>,Node<T>> split() {
    // System.out.println("Relaxed.splitAt(" + i + ")");
    int midpoint = nodes.length >> 1; // Shift-right one is the same as dividing by 2.
    Relaxed<T> left = new Relaxed<>(Arrays.copyOf(endIndices, midpoint),
                                    Arrays.copyOf(nodes, midpoint));
    int[] rightEndIndices = new int[nodes.length - midpoint];
    int leftEndIdx = endIndices[midpoint - 1];
    // The right half's endIndices must be re-based so its counts start from zero again.
    for (int j = 0; j < rightEndIndices.length; j++) {
        rightEndIndices[j] = endIndices[midpoint + j] - leftEndIdx;
    }
    // I checked this at javaRepl and indeed this starts from the correct item.
Relaxed<T> right = new Relaxed<>(rightEndIndices,
                                 Arrays.copyOfRange(nodes, midpoint, nodes.length));
return tup(left, right);
}

@Override public boolean thisNodeHasCapacity() {
    return nodes.length < MAX_NODE_LENGTH;
}

// I don't think this should ever be called.  Should this throw an exception instead?
@Override public boolean hasStrictCapacity() {
    throw new UnsupportedOperationException("I don't think this should ever be called.");
//        return false;
}

@Override public boolean hasRelaxedCapacity(int index, int size) {
    if ( (size < MIN_NODE_LENGTH) || (size > MAX_NODE_LENGTH) ) {
        throw new IllegalArgumentException("Bad size: " + size);
    }
    if (thisNodeHasCapacity()) { return true; }
    int subNodeIndex = subNodeIndex(index);
    return nodes[subNodeIndex].hasRelaxedCapacity(subNodeAdjustedIndex(index, subNodeIndex),
                                                  size);
}

/** Pushes the old focus array into the tree at the given index, returning the new
 (possibly restructured) node.
 @param index the whole-tree index the focus belongs at
 @param oldFocus the focus array being pushed down
 @return a new node containing this node's items plus the focus items
 */
@SuppressWarnings("unchecked")
@Override public Node<T> pushFocus(int index, T[] oldFocus) {
    int subNodeIndex = subNodeIndex(index);
    Node<T> subNode = nodes[subNodeIndex];
    int subNodeAdjustedIndex = subNodeAdjustedIndex(index, subNodeIndex);

    // Does the subNode have space enough to handle it?
    if (subNode.hasRelaxedCapacity(subNodeAdjustedIndex, oldFocus.length)) {
        // Push the focus down to a lower-level node with capacity.
        Node<T> newNode = subNode.pushFocus(subNodeAdjustedIndex, oldFocus);
        // Make a copy of our nodes array, replacing the old node at subNodeIndex with the new.
        Node<T>[] newNodes = replaceInArrayAt(newNode, nodes, subNodeIndex, Node.class);
        // Increment endIndices for the changed item and all items to the right.
        int[] newEndIndices = new int[endIndices.length];
        if (subNodeIndex > 0) {
            System.arraycopy(endIndices, 0, newEndIndices, 0, subNodeIndex);
        }
        for (int i = subNodeIndex; i < endIndices.length; i++) {
            newEndIndices[i] = endIndices[i] + oldFocus.length;
        }
        return new Relaxed<>(newEndIndices, newNodes);
    }

    // I think this is a root node thing.
    if (!thisNodeHasCapacity()) {
        // TODO: Figure out optimal place to split
        // For now, split at half of maxIndex.
        Tuple2<Node<T>,Node<T>> split = split();
        Node<T> node1 = split._1();
        Node<T> node2 = split._2();
        Relaxed<T> newRelaxed =
                new Relaxed<>(new int[] {node1.maxIndex(),
                                         node1.maxIndex() + node2.maxIndex()},
                              (Node<T>[]) new Node[] {node1, node2});
        return newRelaxed.pushFocus(index, oldFocus);
    }

    if (subNode instanceof Leaf) {
        if (subNodeAdjustedIndex == 0) {
            // The focus lands exactly on this leaf's left boundary: just add a new leaf.
            Leaf<T> newNode = new Leaf<>(oldFocus);
            Node<T>[] newNodes = insertIntoArrayAt(newNode, nodes, subNodeIndex, Node.class);

            // Rebuild endIndices: entries before the insertion point are unchanged, the new
            // leaf's end is the previous cumulative count plus the focus length, and every
            // entry after it shifts up by the focus length.
            int[] newEndIndices = new int[endIndices.length + 1];
            int prevEndIdx = 0;
            if (subNodeIndex > 0) {
                // FIXED off-by-one: copy subNodeIndex entries (indices 0..subNodeIndex-1).
                // The old code copied subNodeIndex - 1, leaving newEndIndices[subNodeIndex-1]
                // zero, which this class's constructor validation then rejects.
                System.arraycopy(endIndices, 0, newEndIndices, 0, subNodeIndex);
                prevEndIdx = endIndices[subNodeIndex - 1];
            }
            // FIXED: the new leaf's cumulative end must include everything before it; the old
            // code stored just oldFocus.length, which was only correct for subNodeIndex == 0.
            newEndIndices[subNodeIndex] = prevEndIdx + oldFocus.length;
            for (int i = subNodeIndex + 1; i < newEndIndices.length; i++) {
                newEndIndices[i] = endIndices[i - 1] + oldFocus.length;
            }
            return new Relaxed<>(newEndIndices, newNodes);
        }
        // TODO: Implement pushing a focus into the middle of a full leaf (split the leaf).
        throw new UnsupportedOperationException("Not implemented yet");
    } // end if subNode instanceof Leaf

    // Here we have capacity and it's not a leaf, so we have to split the appropriate sub-node.
    // For now, split at half of maxIndex.
    Tuple2<Node<T>,Node<T>> newSubNode = subNode.split();
    Node<T> node1 = newSubNode._1();
    Node<T> node2 = newSubNode._2();

    // Splice the two halves in where the single sub-node used to be.
    Node<T>[] newNodes = (Node<T>[]) new Node[nodes.length + 1];
    if (subNodeIndex > 0) {
        System.arraycopy(nodes, 0, newNodes, 0, subNodeIndex);
    }
    newNodes[subNodeIndex] = node1;
    newNodes[subNodeIndex + 1] = node2;
    if (subNodeIndex < nodes.length) {
        System.arraycopy(nodes, subNodeIndex + 1, newNodes, subNodeIndex + 2,
                         nodes.length - subNodeIndex - 1);
    }

    int[] newEndIndices = new int[endIndices.length + 1];
    int prevEndIdx = 0;
    if (subNodeIndex > 0) {
        // FIXED off-by-one: copy subNodeIndex entries (indices 0..subNodeIndex-1); the old
        // code copied subNodeIndex - 1 and left newEndIndices[subNodeIndex-1] as zero.
        System.arraycopy(endIndices, 0, newEndIndices, 0, subNodeIndex);
        prevEndIdx = endIndices[subNodeIndex - 1];
    }
    for (int i = subNodeIndex; i < newEndIndices.length; i++) {
        // TODO: Calculate instead of loading into memory.  See splitAt calculation above.
        prevEndIdx += newNodes[i].maxIndex();
        newEndIndices[i] = prevEndIdx;
    }
    // Now that the target sub-node has been split (and so has capacity), retry the push.
    return new Relaxed<>(newEndIndices, newNodes).pushFocus(index, oldFocus);
}

@Override public Node<T> replace(int idx, T t) {
    // TODO: Implement
    throw new UnsupportedOperationException("Not Implemented Yet");
}

@Override public String toString() {
    return "Relaxed(endIndicies=" + Arrays.toString(endIndices) +
           " nodes=" + Arrays.toString(nodes).replaceAll(", Relaxed\\(", ",\n Relaxed(") + ")";
}
} // end class Relaxed
} // end class RrbTree1
// ===== File: src/main/java/org/organicdesign/fp/experimental/RrbTree1.java =====
// Copyright 2016-05-28 PlanBase Inc. & Glen Peterson // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.organicdesign.fp.experimental; import org.organicdesign.fp.collections.ImList; import org.organicdesign.fp.collections.UnmodSortedIterable; import org.organicdesign.fp.tuple.Tuple2; import java.lang.reflect.Array; import java.util.Arrays; import java.util.List; import static org.organicdesign.fp.StaticImports.tup; /** This is an experiment - DO NOT USE except to test. This is based on the paper, "RRB-Trees: Efficient Immutable Vectors" by Phil Bagwell and Tiark Rompf. With some background from the Cormen, Leiserson, Rivest & Stein Algorithms book entry on B-Trees. Also with an awareness of the Clojure PersistentVector by Rich Hickey. All errors are by Glen Peterson. Priorities: append(item) get(index) insert(item, index) */ public class RrbTree1<E> implements ImList<E> { // Definitions: // Strict: Short for "Strict Radix: meaning that all sub-nodes are a uniform width of exactly RADIX_NODE_LENGTH // (Use a power of 2 to take advantage of bit shifting which is a key performance reason for the uniform // width). Strict nodes have leaf widths of exactly RADIX_NODE_LENGTH and are left-filled and packed up to // the last full node. // Relaxed: In this case refers to "Relaxed Radix" which means the nodes are of somewhat varying sizes. The sizes // range from MIN_NODE_LENGTH (Cormen et al calls this "Minimum Degree") to MAX_NODE_LENGTH. 
// There's bit shifting going on here because it's a very fast operation. // Shifting right by 5 is eons faster than dividing by 32. // TODO: Change to 5. private static final int NODE_LENGTH_POW_2 = 2; // 2 for testing now, 5 for real later. private static final int RADIX_NODE_LENGTH = 1 << NODE_LENGTH_POW_2;// 0b00000000000000000000000000100000 = 0x20 = 32 // (MIN_NODE_LENGTH + MAX_NODE_LENGTH) / 2 should equal RADIX_NODE_LENGTH so that they have the same average node // size to make the index guessing easier. private static final int MIN_NODE_LENGTH = (RADIX_NODE_LENGTH+1) * 2 / 3; private static final int MAX_NODE_LENGTH = ( (RADIX_NODE_LENGTH+1) * 4 / 3) - 1; // In the PersistentVector, this is called the tail, but here it can be at // Other areas of the tree besides the tail. // private E[] focus; // All the tree nodes from the root to the block in focus. // private Node<E>[] display; // =================================== Array Helper Functions ================================== // We only one empty array and it makes the code simpler than pointing to null all the time. // Have to time the difference between using this and null. The only difference I can imagine // is that this has an address in memory and null does not, so it could save a memory lookup // in some places. private static final Object[] EMPTY_ARRAY = new Object[0]; // Helper function to avoid type warnings. @SuppressWarnings("unchecked") private static <T> T[] emptyArray() { return (T[]) EMPTY_ARRAY; } // Helper function to avoid type warnings. @SuppressWarnings("unchecked") private static <T> T[] singleElementArray(T elem) { return (T[]) new Object[] { elem }; } @SuppressWarnings("unchecked") private static <T> T[] insertIntoArrayAt(T item, T[] items, int idx, Class<T> tClass) { // Make an array that's one bigger. It's too bad that the JVM bothers to // initialize this with nulls. T[] newItems = (T[]) ( (tClass == null) ? 
new Object[items.length + 1] :
                               Array.newInstance(tClass, items.length + 1) );
        // If we aren't inserting at the first item, array-copy the items before the insert
        // point.
        if (idx > 0) {
            System.arraycopy(items, 0, newItems, 0, idx);
        }
        // Insert the new item.
        newItems[idx] = item;
        // If we aren't inserting at the last item, array-copy the items after the insert
        // point.
        if (idx < items.length) {
            System.arraycopy(items, idx, newItems, idx + 1, items.length - idx);
        }
        return newItems;
    }

    // Convenience overload: no class token, so the copy is an Object[].
    private static <T> T[] insertIntoArrayAt(T item, T[] items, int idx) {
        return insertIntoArrayAt(item, items, idx, null);
    }

    // Returns a copy of origItems with all of insertedItems spliced in starting at idx.
    @SuppressWarnings("unchecked")
    private static <T> T[] spliceIntoArrayAt(T[] insertedItems, T[] origItems, int idx,
                                             Class<T> tClass) {
        // Make an array that big enough.  It's too bad that the JVM bothers to
        // initialize this with nulls.
        T[] newItems = (T[]) Array.newInstance(tClass, insertedItems.length + origItems.length);

        // If we aren't inserting at the first item, array-copy the items before the insert
        // point.
        if (idx > 0) {
            System.arraycopy(origItems, 0, newItems, 0, idx);
        }

        // Insert the new items
        System.arraycopy(insertedItems, 0, newItems, idx, insertedItems.length);

        // If we aren't inserting at the last item, array-copy the items after the insert
        // point.
        if (idx < origItems.length) {
            System.arraycopy(origItems, idx, newItems, idx + insertedItems.length,
                             origItems.length - idx);
        }
        return newItems;
    }

    // Returns a copy of origItems with the element at idx replaced by replacedItem.
    @SuppressWarnings("unchecked")
    private static <T> T[] replaceInArrayAt(T replacedItem, T[] origItems, int idx,
                                            Class<T> tClass) {
        // Make an array that big enough.  It's too bad that the JVM bothers to
        // initialize this with nulls.
        T[] newItems = (T[]) ( (tClass == null) ?
                               new Object[origItems.length] :
                               Array.newInstance(tClass, origItems.length) );
        System.arraycopy(origItems, 0, newItems, 0, origItems.length);
        newItems[idx] = replacedItem;
        return newItems;
    }

    // Convenience overload: no class token, so the copy is an Object[].
    private static <T> T[] replaceInArrayAt(T replacedItem, T[] origItems, int idx) {
        return replaceInArrayAt(replacedItem, origItems, idx, null);
    }

    private static RrbTree1 EMPTY_RRB_TREE = new RrbTree1<>(emptyArray(), 0, Leaf.emptyLeaf(), 0);

    /** This is the public factory method. @return the empty RRB-Tree (there is only one) */
    @SuppressWarnings("unchecked")
    public static <T> RrbTree1<T> empty() { return (RrbTree1<T>) EMPTY_RRB_TREE; }

    // Focus is like the tail in Rich Hickey's Persistent Vector, but named after the structure
    // in Scala's implementation.  Tail and focus are both designed to allow repeated appends or
    // inserts to the same area of a vector to be done in constant time.  Tail only handles appends
    // but this can handle repeated inserts to any area of a vector.
    private final E[] focus;
    private final int focusStartIndex;
    private final Node<E> root;
    private final int size;

    // Constructor
    private RrbTree1(E[] f, int fi, Node<E> r, int s) {
        focus = f; focusStartIndex = fi; root = r; size = s;
    }

    @Override public int size() { return size; }

    @Override public boolean equals(Object other) {
        if (this == other) { return true; }
        if ( !(other instanceof List) ) { return false; }
        List that = (List) other;
        return (this.size() == that.size()) &&
               UnmodSortedIterable.equals(this, UnmodSortedIterable.castFromList(that));
    }

    /** This is correct, but O(n).  This implementation is compatible with java.util.AbstractList.
*/ @Override public int hashCode() { int ret = 1; for (E item : this) { ret *= 31; if (item != null) { ret += item.hashCode(); } } return ret; } @Override public E get(int i) { // System.out.println(" get(" + i + ")"); if ( (i < 0) || (i > size) ) { throw new IndexOutOfBoundsException("Index: " + i + " size: " + size); } if ( (focusStartIndex < 0) || (focusStartIndex > size) ) { throw new IllegalStateException("focusStartIndex: " + focusStartIndex + " size: " + size); } if (i >= focusStartIndex) { // System.out.println(" i>=focusStartIndex: " + focusStartIndex); int focusOffset = i - focusStartIndex; if (focusOffset < focus.length) { return focus[focusOffset]; } i -= focus.length; } // System.out.println(" focusStartIndex: " + focusStartIndex); // System.out.println(" focus.length: " + focus.length); // System.out.println(" adjusted index: " + i); return root.get(i); } /** Adds an item at the end of this structure. This is the most efficient way to build an RRB Tree as it conforms to the Clojure PersistentVector and all of its optimizations. @param t the item to append @return a new RRB-Tree with the item appended. */ @Override public RrbTree1<E> append(E t) { // System.out.println("=== append(" + t + ") ==="); // If our focus isn't set up for appends or if it's full, insert it into the data structure // where it belongs. Then make a new focus if ( ( (focusStartIndex < root.maxIndex()) && (focus.length > 0) ) || (focus.length >= RADIX_NODE_LENGTH) ) { // TODO: Does focusStartIndex only work for the root node, or is it translated as it goes down? Node<E> newRoot = root.pushFocus(focusStartIndex, focus); E[] newFocus = singleElementArray(t); return new RrbTree1<>(newFocus, size, newRoot, size + 1); } E[] newFocus = insertIntoArrayAt(t, focus, focus.length); return new RrbTree1<>(newFocus, focusStartIndex, root, size + 1); } /** I would have called this insert and reversed the order or parameters. 
@param idx the insertion point @param element the item to insert @return a new RRB-Tree with the item inserted. */ public RrbTree1<E> insert(int idx, E element) { System.out.println("insert(int " + idx + ", E " + element + ")"); // If the focus is full, push it into the tree and make a new one with the new element. if (focus.length >= RADIX_NODE_LENGTH) { Node<E> newRoot = root.pushFocus(focusStartIndex, focus); E[] newFocus = singleElementArray(element); return new RrbTree1<>(newFocus, idx, newRoot, size + 1); } // If the index is within the focus, add the item there. int diff = idx - focusStartIndex; System.out.println("diff: " + diff); if ( (diff >= 0) && (diff <= focus.length) ) { System.out.println("new focus..."); E[] newFocus = insertIntoArrayAt(element, focus, diff); return new RrbTree1<>(newFocus, focusStartIndex, root, size + 1); } System.out.println("insert somewhere else than the current focus."); System.out.println("focusStartIndex: " + focusStartIndex); System.out.println("focus: " + Arrays.toString(focus)); // Here we are left with an insert somewhere else than the current focus. Node<E> newRoot = focus.length > 0 ? root.pushFocus(focusStartIndex, focus) : root; E[] newFocus = singleElementArray(element); return new RrbTree1<>(newFocus, idx, newRoot, size + 1); } /** Replace the item at the given index. Note: i.replace(i.size(), o) used to be equivalent to i.concat(o), but it probably won't be for the RRB tree implementation, so this will change too. @param i the index where the value should be stored. 
@param t the value to store
     @return a new RrbTree1 with the replaced item
     */
    @Override public RrbTree1<E> replace(int i, E t) {
        if ( (i < 0) || (i > size) ) {
            throw new IndexOutOfBoundsException("Index: " + i + " size: " + size);
        }
        // If the index falls inside the focus window, replace within the focus array;
        // otherwise translate the index past the focus and replace down in the tree.
        if (i >= focusStartIndex) {
            int focusOffset = i - focusStartIndex;
            if (focusOffset < focus.length) {
                return new RrbTree1<>(replaceInArrayAt(t, focus, focusOffset),
                                      focusStartIndex, root, size);
            }
            i -= focus.length;
        }
        return new RrbTree1<>(focus, focusStartIndex, root.replace(i, t), size);
    }

    @Override public String toString() {
        return "RrbTree(fsi=" + focusStartIndex + " focus=" + Arrays.toString(focus) +
               "\n root=" + root + ")";
    }

    /** Internal tree node: implemented by Leaf, Strict, and Relaxed. */
    private interface Node<T> {
        /** Return the item at the given index */
        T get(int i);
        /** Highest index returnable by this node */
        int maxIndex();
        /** Inserts an item at the given index */
//        @Override public Node<T> insert(int i, T item);
//        Node<T> append(T item);
        /** Returns true if this node's array is not full */
        boolean thisNodeHasCapacity();
        /** Returns true if this strict-Radix tree can take another 32 items. */
        boolean hasStrictCapacity();
        /** Can we put focus at the given index without reshuffling nodes?
         @param index the index we want to insert at
         @param size the number of items to insert.  Must be MIN_NODE_LENGTH <= size <= MAX_NODE_LENGTH
         @return true if we can do so without otherwise adjusting the tree.
         */
        boolean hasRelaxedCapacity(int index, int size);

        /** Splits this node into two roughly equal halves. */
        Tuple2<Node<T>,Node<T>> split();

        // Because we want to append/insert into the focus as much as possible, we will treat
        // the insert or append of a single item as a degenerate case.  Instead, the primary way
        // to add to the internal data structure will be to push the entire focus array into it
        Node<T> pushFocus(int index, T[] oldFocus);

        /** Returns a copy of this node with the item at idx replaced by t. */
        Node<T> replace(int idx, T t);
    }

    private static class Leaf<T> implements Node<T> {
        private static final Leaf EMPTY_LEAF = new Leaf<>(EMPTY_ARRAY);
        @SuppressWarnings("unchecked")
        private static final <T> Leaf<T> emptyLeaf() { return (Leaf<T>) EMPTY_LEAF; }

        final T[] items;

        // It can only be Strict if items.length == RADIX_NODE_LENGTH and if its parents
        // are strict.
//        boolean isStrict;

        Leaf(T[] ts) { items = ts; }

        @Override public T get(int i) { return items[i]; }
        @Override public int maxIndex() { return items.length; }

//        @Override public Node<T> append(T item) {
//            T[] newItems = Arrays.copyOf(items, items.length + 1);
//            newItems[items.length] = item;
//            return new Leaf<>(newItems);
//        }

        // If we want to add one more to an existing leaf node, it must already be part of a
        // relaxed tree.
        @Override public boolean thisNodeHasCapacity() { return items.length < MAX_NODE_LENGTH; }

        @Override public boolean hasStrictCapacity() { return false; }

        @Override public boolean hasRelaxedCapacity(int index, int size) {
            // Appends and prepends need to be a good size, but random inserts do not.
//            if ( (size < MIN_NODE_LENGTH) || (size > MAX_NODE_LENGTH) ) {
//                throw new IllegalArgumentException("Bad size: " + size);
//                // + " MIN_NODE_LENGTH=" + MIN_NODE_LENGTH + " MAX_NODE_LENGTH=" + MAX_NODE_LENGTH);
//            }
            return (items.length + size) < MAX_NODE_LENGTH;
        }

        /** This is a Relaxed operation.  Performing it on a Strict node causes it and all
         ancestors to become Relaxed Radix.  The parent should only split when
         size < MIN_NODE_LENGTH during a slice operation.
         @return Two new nodes.
*/ @Override public Tuple2<Node<T>,Node<T>> split() { throw new UnsupportedOperationException("Not Implemented Yet"); // System.out.println("Leaf.splitAt(" + i + ")"); // // TODO: if we split for an insert-when-full, one side of the split should be bigger in preparation for the insert. // if (i == 0) { // return tup(emptyLeaf(), this); // } // if (i == items.length) { // // Not sure this can possibly be called, but just in case... // return tup(this, emptyLeaf()); // } // // return tup(new Leaf<>(Arrays.copyOf(items, i)), // new Leaf<>(Arrays.copyOfRange(items, i, items.length - i))); } // I think this can only be called when the root node is a leaf. @SuppressWarnings("unchecked") @Override public Node<T> pushFocus(int index, T[] oldFocus) { if (oldFocus.length == 0) { throw new IllegalStateException("Never call this with an empty focus!"); } // We put the empty Leaf as the root of the empty vector and it stays there // until the first call to this method, at which point, the oldFocus becomes the // new root. if (items.length == 0) { return new Leaf<>(oldFocus); } // Try first to yield a Strict node. For a leaf like this, that means both this node and the pushed // focus are RADIX_NODE_LENGTH. It also means the old focus is being pushed at either the beginning or // the end of this node (not anywhere in-between). if ( (items.length == RADIX_NODE_LENGTH) && (oldFocus.length == RADIX_NODE_LENGTH) && ((index == RADIX_NODE_LENGTH) || (index == 0)) ) { Leaf<T>[] newNodes = (index == RADIX_NODE_LENGTH) ? new Leaf[] { this, new Leaf<>(oldFocus)} : new Leaf[] { new Leaf<>(oldFocus), this }; return new Strict<>(NODE_LENGTH_POW_2, newNodes); } if ((items.length + oldFocus.length) < MAX_NODE_LENGTH) { return new Leaf<>(spliceIntoArrayAt(oldFocus, items, index, (Class<T>) items[0].getClass())); } // We should only get here when the root node is a leaf. 
// Maybe we should be more circumspect with our array creation, but for now, just jam // jam it into one big array, then split it up for simplicity // TODO: Consider optimizing: T[] newItems = spliceIntoArrayAt(oldFocus, items, index, (Class<T>) items[0].getClass()); System.out.println(" newItems: " + Arrays.toString(newItems)); // Shift right one is divide-by 2. int splitPoint = newItems.length >> 1; System.out.println(" splitPoint: " + splitPoint); T[] left = (T[]) new Object[splitPoint]; T[] right = (T[]) new Object[newItems.length - splitPoint]; // original array, offset, newArray, offset, length System.arraycopy(newItems, 0, left, 0, splitPoint); System.out.println(" left: " + Arrays.toString(left)); System.arraycopy(newItems, splitPoint, right, 0, right.length); System.out.println(" right: " + Arrays.toString(right)); Arrays.copyOf(newItems, splitPoint); Leaf<T> leftLeaf = new Leaf<>(left); Leaf<T> rightLeaf = new Leaf<>(right); // TODO: Could calculate the maxIndex values Relaxed<T> newRelaxed = new Relaxed<>(new int[] { leftLeaf.maxIndex(), leftLeaf.maxIndex() + rightLeaf.maxIndex() }, new Leaf[] { leftLeaf, rightLeaf }); System.out.println(" newRelaxed: " + newRelaxed); return newRelaxed; // System.out.println("pushFocus(" + index + ", " + Arrays.toString(oldFocus) + ")"); // System.out.println(" items: " + Arrays.toString(items)); // System.out.println(" oldFocus: " + Arrays.toString(oldFocus)); // // If we there is room for the entire focus to fit into this node, just stick it in // // there! // if ( (items.length + oldFocus.length) < MAX_NODE_LENGTH ) { // return new Leaf<>(spliceIntoArrayAt(oldFocus, items, index)); // } // Ugh, we have to chop it across 2 arrays. 
// throw new UnsupportedOperationException("Not implemented yet!"); } @Override public Node<T> replace(int idx, T t) { return new Leaf<>(replaceInArrayAt(t, items, idx)); } // @Override public Leaf<T> insert(int i, T item) { if (!thisNodeHasCapacity()) { throw new IllegalStateException("Called insert, but can't add one more!" + " Parent should have called split first."); } // Return our new node. return new Leaf<>(insertIntoArrayAt(item, items, i)); } @Override public String toString() { // return "Leaf("+ Arrays.toString(items) + ")"; return Arrays.toString(items); } } // end class Leaf // Contains a left-packed tree of exactly 32-item nodes. private static class Strict<T> implements Node<T> { // This is the number of levels below this node (height) times NODE_LENGTH // For speed, we calculate it as height << NODE_LENGTH_POW_2 // TODO: Can we store shift at the top-level Strict only? final int shift; // These are the child nodes final Node<T>[] nodes; // Constructor Strict(int s, Node<T>[] ns) { shift = s; nodes = ns; // System.out.println(" new Strict" + shift + Arrays.toString(ns)); } /** Returns the high bits which we use to index into our array. This is the simplicity (and speed) of Strict indexing. When everything works, this can be inlined for performance. This could maybe yield a good guess for Relaxed nodes? */ private int highBits(int i) { return i >> shift; } /** Returns the low bits of the index (the part Strict sub-nodes need to know about). This helps make this data structure simple and fast. When everything works, this can be inlined for performance. DO NOT use this for Relaxed nodes - they use subtraction instead! 
*/ private int lowBits(int i) { int shifter = -1 << shift; // System.out.println(" shifter (binary): " + Integer.toBinaryString(shift)); int invShifter = ~shifter; // System.out.println(" invShifter (binary): " + Integer.toBinaryString(invShifter)); // System.out.println(" i (binary): " + Integer.toBinaryString(invShifter)); return i & invShifter; // System.out.println(" subNodeIdx (binary): " + Integer.toBinaryString(subNodeIdx)); // System.out.println(" subNodeIdx: " + subNodeIdx); } @Override public T get(int i) { // System.out.println(" Strict.get(" + i + ")"); // Find the node indexed by the high bits (for this height). // Send the low bits on to our sub-nodes. return nodes[highBits(i)].get(lowBits(i)); } @Override public int maxIndex() { int lastNodeIdx = nodes.length - 1; // System.out.println(" Strict.maxIndex()"); // System.out.println(" nodes.length:" + nodes.length); // System.out.println(" shift:" + shift); // System.out.println(" RADIX_NODE_LENGTH:" + RADIX_NODE_LENGTH); // Add up all the full nodes (only the last can be partial) int shiftedLength = lastNodeIdx << shift; // System.out.println(" shifed length:" + shiftedLength); int partialNodeSize = nodes[lastNodeIdx].maxIndex(); // System.out.println(" Remainder:" + partialNodeSize); return shiftedLength + partialNodeSize; } @Override public boolean thisNodeHasCapacity() { return nodes.length < RADIX_NODE_LENGTH; } @Override public boolean hasStrictCapacity() { return thisNodeHasCapacity() || nodes[nodes.length - 1].hasStrictCapacity(); } @Override public boolean hasRelaxedCapacity(int index, int size) { if ( (size < MIN_NODE_LENGTH) || (size > MAX_NODE_LENGTH) ) { throw new IllegalArgumentException("Bad size: " + size); } // TODO: Very unsure about this implementation! // return highBits(index) == nodes.length - 1; // It has relaxed capacity because a Relaxed node could have up to MAX_NODE_LENGTH nodes and by definition // this Strict node has no more than RADIX_NODE_LENGTH items. 
return true; } @Override public Tuple2<Node<T>,Node<T>> split() { // System.out.println("Strict.splitAt(" + i + ")"); // TODO: Implement throw new UnsupportedOperationException("Not implemented yet"); } @SuppressWarnings("unchecked") @Override public Node<T> pushFocus(int index, T[] oldFocus) { // System.out.println("Strict pushFocus(" + Arrays.toString(oldFocus) + ", " + index + ")"); // System.out.println(" this: " + this); // If the proper sub-node can take the additional array, let it! int subNodeIndex = highBits(index); // System.out.println(" subNodeIndex: " + subNodeIndex); // It's a strict-compatible addition if the focus being pushed is of // RADIX_NODE_LENGTH and the index it's pushed to falls on the final leaf-node boundary // and the children of this node are leaves and this node is not full. if (oldFocus.length == RADIX_NODE_LENGTH) { if (index == maxIndex()) { Node<T> lastNode = nodes[nodes.length - 1]; if (lastNode.hasStrictCapacity()) { // System.out.println(" Pushing the focus down to a lower-level node with capacity."); Node<T> newNode = lastNode.pushFocus(lowBits(index), oldFocus); Node<T>[] newNodes = replaceInArrayAt(newNode, nodes, nodes.length - 1, Node.class); return new Strict<>(shift, newNodes); } // Regardless of what else happens, we're going to add a new node. Node<T> newNode = new Leaf<>(oldFocus); // Make a skinny branch of a tree by walking up from the leaf node until our // new branch is at the same level as the old one. We have to build evenly // (like hotels in Monopoly) in order to keep the tree balanced. Even height, // but left-packed (the lower indices must all be filled before adding new // nodes to the right). int newShift = NODE_LENGTH_POW_2; // If we've got space in our array, we just have to add skinny-branch nodes up to // the level below ours. But if we don't have space, we have to add a // single-element strict node at the same level as ours here too. int maxShift = (nodes.length < RADIX_NODE_LENGTH) ? 
shift : shift + 1; // Make the skinny-branch of single-element strict nodes: while (newShift < maxShift) { // System.out.println(" Adding a skinny branch node..."); Node<T>[] newNodes = (Node<T>[]) Array.newInstance(newNode.getClass(), 1); newNodes[0] = newNode; newNode = new Strict<>(newShift, newNodes); newShift += NODE_LENGTH_POW_2; } if ((nodes.length < RADIX_NODE_LENGTH)) { // System.out.println(" Adding a node to the existing array"); Node<T>[] newNodes = (Node<T>[]) insertIntoArrayAt(newNode, nodes, subNodeIndex, Node.class); // This could allow cheap strict inserts on any leaf-node boundary... return new Strict<>(shift, newNodes); } else { // System.out.println(" Adding a level to the Strict tree"); return new Strict(shift + NODE_LENGTH_POW_2, new Node[]{this, newNode}); } } else if ( (shift == NODE_LENGTH_POW_2) && (lowBits(index) == 0) && (nodes.length < RADIX_NODE_LENGTH) ) { // Here we are: // Pushing a RADIX_NODE_LENGTH focus // At the level above the leaf nodes // Inserting *between* existing leaf nodes (or before or after) // Have room for at least one more leaf child // That makes it free and legal to insert a new RADIX_NODE_LENGTH leaf node and still yield a // Strict (as opposed to Relaxed). // Regardless of what else happens, we're going to add a new node. Node<T> newNode = new Leaf<>(oldFocus); Node<T>[] newNodes = (Node<T>[]) insertIntoArrayAt(newNode, nodes, subNodeIndex, Node.class); // This allows cheap strict inserts on any leaf-node boundary... return new Strict<>(shift, newNodes); } } // end if oldFocus.length == RADIX_NODE_LENGTH // Here we're going to yield a Relaxed Radix node, so punt to that (slower) logic. 
// System.out.println("Yield a Relaxed node."); int[] endIndices = new int[nodes.length]; for (int i = 0; i < endIndices.length; i++) { endIndices[i] = (i + 1) << shift; } // System.out.println("End indices: " + Arrays.toString(endIndices)); return new Relaxed<>(endIndices, nodes).pushFocus(index, oldFocus); } @SuppressWarnings("unchecked") @Override public Node<T> replace(int idx, T t) { // System.out.println(" Strict.get(" + i + ")"); // Find the node indexed by the high bits (for this height). // Send the low bits on to our sub-nodes. int thisNodeIdx = highBits(idx); Node<T> newNode = nodes[thisNodeIdx].replace(lowBits(idx), t); return new Strict<>(shift, replaceInArrayAt(newNode, nodes, thisNodeIdx, Node.class)); } // @Override public Tuple2<Strict<T>,Strict<T>> split() { // Strict<T> right = new Strict<T>(shift, new Strict[0]); // return tup(this, right); // } // @Override public Strict<T> append(T item) { // Node<T> last = nodes[nodes.length - 1]; // if (last.thisNodeHasCapacity()) { // // Make a copy of our node array // Node<T>[] newNodes = Arrays.copyOf(nodes, nodes.length); // // Replace the last node with the updated one. // newNodes[nodes.length - 1] = last.append(item); // // Return new, updated node. // return new Strict<>(shift, newNodes); // } // if (nodes.length >= RADIX_NODE_LENGTH) { // throw new UnsupportedOperationException("This I think can only happen to the root node."); // } else { // // Make a larger copy of our node array // Node<T>[] newNodes = Arrays.copyOf(nodes, nodes.length + 1); // // Add a new node at the end of it. // newNodes[nodes.length] = new Leaf<>(singleElementArray(item)); // // Return new, updated node. // return new Strict<>(shift, newNodes); // } // } @Override public String toString() { // return "Strict(nodes.length="+ nodes.length + ", shift=" + shift + ")"; return "Strict" + shift + Arrays.toString(nodes); } } // Contains a relaxed tree of nodes that average around 32 items each. 
private static class Relaxed<T> implements Node<T> { // The max index stored in each sub-node. This is a separate array so it can be retrieved // in a single memory fetch. Note that this is a 1-based index, or really a count, not a // normal zero-based index. final int[] endIndices; // The sub nodes final Node<T>[] nodes; // Constructor Relaxed(int[] is, Node<T>[] ns) { endIndices = is; nodes = ns; // TODO: These are constraint validations to prevent implementation bugs - remove before shipping. if (endIndices.length < 1) { throw new IllegalArgumentException("endIndices.length < 1"); } if (nodes.length < 1) { throw new IllegalArgumentException("nodes.length < 1"); } if (endIndices.length != nodes.length) { throw new IllegalArgumentException("endIndices.length:" + endIndices.length + " != nodes.length:" + nodes.length); } int endIdx = 0; for (int i = 0; i < nodes.length; i++) { endIdx += nodes[i].maxIndex(); if (endIdx != endIndices[i]) { throw new IllegalArgumentException("nodes[" + i + "].maxIndex() was " + nodes[i].maxIndex() + " which is not compatable with endIndices[" + i + "] which was " + endIndices[i] + "\n" + " endIndices: " + Arrays.toString(endIndices) + " nodes: " + Arrays.toString(nodes)); } } } @Override public int maxIndex() { return endIndices[endIndices.length - 1]; } /** Converts the index of an item into the index of the sub-node containing that item. @param index The index of the item in the entire tree @return The index of the branch of the tree (the sub-node and its ancestors) the item resides in. 
*/ private int subNodeIndex(int index) { // Index range: 0 to maxIndex() // Result Range: 0 to startIndices.length // liner interpolation: index/maxIndex() = result/startIndices.length // result = index * startIndices.length / maxIndex(); // int guess = index * startIndices.length / maxIndex(); // int guessedItem = startIndices[guess]; // while (guessedItem > (index + MIN_NODE_LENGTH)) { // guessedItem = startIndices[--guess]; // } // while (guessedItem < index) { // guessedItem = startIndices[++guess]; // } // TODO: This is really slow. Do linear interpolation instead. for (int i = 0; i < endIndices.length; i++) { if (index < endIndices[i]) { return i; } } // For an append just one element beyond the end of the existing data structure, // just try to add it to the last node. This might seem overly permissive to accept // these as inserts or appends without differentiating between the two, but it flows // naturally with this data structure and I think makes it easier to use without // encouraging user programming errors. // Hopefully this still leads to a relatively balanced tree... if (index == endIndices[endIndices.length - 1]) { return endIndices.length - 1; } throw new IllegalStateException("Should be unreachable! index: " + index + " this: " + this.toString()); } /** Converts the index of an item into the index to pass to the sub-node containing that item. @param index The index of the item in the entire tree @param subNodeIndex the index into this node's array of sub-nodes. @return The index to pass to the sub-branch the item resides in */ private int subNodeAdjustedIndex(int index, int subNodeIndex) { return (subNodeIndex == 0) ? 
index : index - endIndices[subNodeIndex - 1]; } @Override public T get(int index) { // System.out.println(" Relaxed.get(" + index + ")"); int subNodeIndex = subNodeIndex(index); // System.out.println(" subNodeIndex: " + subNodeIndex); // System.out.println(" subNodeAdjustedIndex(index, subNodeIndex): " + subNodeAdjustedIndex(index, subNodeIndex)); return nodes[subNodeIndex].get(subNodeAdjustedIndex(index, subNodeIndex)); } @Override public Tuple2<Node<T>,Node<T>> split() { // System.out.println("Relaxed.splitAt(" + i + ")"); int midpoint = nodes.length >> 1; // Shift-right one is the same as dividing by 2. Relaxed<T> left = new Relaxed<>(Arrays.copyOf(endIndices, midpoint), Arrays.copyOf(nodes, midpoint)); int[] rightEndIndices = new int[nodes.length - midpoint]; int leftEndIdx = endIndices[midpoint - 1]; for (int j = 0; j < rightEndIndices.length; j++) { rightEndIndices[j] = endIndices[midpoint + j] - leftEndIdx; } // I checked this at javaRepl and indeed this starts from the correct item. Relaxed<T> right = new Relaxed<>(rightEndIndices, Arrays.copyOfRange(nodes, midpoint, nodes.length)); return tup(left, right); } // @Override public Node<T> append(T item) { // Node<T> last = nodes[nodes.length - 1]; // if (last.thisNodeHasCapacity()) { // // Make a copy of our node array // Node<T>[] newNodes = Arrays.copyOf(nodes, nodes.length); // // Replace the last node with the updated one. // newNodes[nodes.length - 1] = last.append(item); // // Return new, updated node. // return new Relaxed<>(endIndices, newNodes); // } // if (nodes.length >= MAX_NODE_LENGTH) { // throw new UnsupportedOperationException("This I think can only happen to the root node."); // } else { // // Make a larger copy of our node array // Node<T>[] newNodes = Arrays.copyOf(nodes, nodes.length + 1); // // Split the last node into two. (Shift-right one is the same as dividing by 2.) // Tuple2<? extends Node<T>,? 
extends Node<T>> splitNodes = last.split(); // // Put the left split node where the old node was // newNodes[nodes.length - 1] = splitNodes._1(); // // Append the item to the right node and add that at the new end position. // newNodes[nodes.length] = splitNodes._2().append(item); // // Return new, updated node. // return new Relaxed<>(endIndices, newNodes); // } // } @Override public boolean thisNodeHasCapacity() { // System.out.println("thisNodeHasCapacity(): nodes.length=" + nodes.length + " MAX_NODE_LENGTH=" + MAX_NODE_LENGTH + " MIN_NODE_LENGTH=" + MIN_NODE_LENGTH + " RADIX_NODE_LENGTH=" + RADIX_NODE_LENGTH); return nodes.length < MAX_NODE_LENGTH; } // I don't think this should ever be called. Should this throw an exception instead? @Override public boolean hasStrictCapacity() { throw new UnsupportedOperationException("I don't think this should ever be called."); // return false; } @Override public boolean hasRelaxedCapacity(int index, int size) { if ( (size < MIN_NODE_LENGTH) || (size > MAX_NODE_LENGTH) ) { throw new IllegalArgumentException("Bad size: " + size); } if (thisNodeHasCapacity()) { return true; } int subNodeIndex = subNodeIndex(index); return nodes[subNodeIndex].hasRelaxedCapacity(subNodeAdjustedIndex(index, subNodeIndex), size); } @Override public Node<T> pushFocus(int index, T[] oldFocus) { // System.out.println("Relaxed pushFocus(" + Arrays.toString(oldFocus) + ", " + index + ")"); // System.out.println(" this: " + this); int subNodeIndex = subNodeIndex(index); Node<T> subNode = nodes[subNodeIndex]; // System.out.println(" subNode: " + subNode); int subNodeAdjustedIndex = subNodeAdjustedIndex(index, subNodeIndex); // Does the subNode have space enough to handle it? 
if (subNode.hasRelaxedCapacity(subNodeAdjustedIndex, oldFocus.length)) { // System.out.println(" Pushing the focus down to a lower-level node with capacity."); Node<T> newNode = subNode.pushFocus(subNodeAdjustedIndex, oldFocus); // Make a copy of our nodesArray, replacing the old node at subNodeIndex with the new. Node<T>[] newNodes = replaceInArrayAt(newNode, nodes, subNodeIndex, Node.class); // Increment endIndicies for the changed item and all items to the right. int[] newEndIndices = new int[endIndices.length]; if (subNodeIndex > 0) { System.arraycopy(endIndices, 0, newEndIndices, 0, subNodeIndex); } for (int i = subNodeIndex; i < endIndices.length; i++) { newEndIndices[i] = endIndices[i] + oldFocus.length; } return new Relaxed<>(newEndIndices, newNodes); } // I think this is a root node thing. if (!thisNodeHasCapacity()) { // TODO: Figure out optimal place to split // For now, split at half of maxIndex. Tuple2<Node<T>,Node<T>> split = split(); Node<T> node1 = split._1(); Node<T> node2 = split._2(); // System.out.println("Split node1: " + node1); // System.out.println("Split node2: " + node2); Relaxed<T> newRelaxed = new Relaxed<>(new int[] {node1.maxIndex(), node1.maxIndex() + node2.maxIndex()}, (Node<T>[]) new Node[] {node1, node2}); // System.out.println("newRelaxed3: " + newRelaxed); return newRelaxed.pushFocus(index, oldFocus); } if (subNode instanceof Leaf) { // System.out.println("Leaf!"); if (subNodeAdjustedIndex == 0) { // Just add a new leaf Leaf<T> newNode = new Leaf<>(oldFocus); Node<T>[] newNodes = insertIntoArrayAt(newNode, nodes, subNodeIndex, Node.class); // Increment endIndicies for the changed item and all items to the right. 
int[] newEndIndices = new int[endIndices.length + 1]; if (subNodeIndex > 0) { System.arraycopy(endIndices, 0, newEndIndices, 0, subNodeIndex - 1); } newEndIndices[subNodeIndex] = oldFocus.length; for (int i = subNodeIndex + 1; i < newEndIndices.length; i++) { newEndIndices[i] = endIndices[i - 1] + oldFocus.length; } Relaxed<T> newRelaxed = new Relaxed<>(newEndIndices, newNodes); // System.out.println("newRelaxed1: " + newRelaxed); return newRelaxed; } // if (subNodeAdjustedIndex == subNode.maxIndex()) { // // // DIFFERENT: // subNodeIndex++; // // END DIFFERENT // // // Just add a new leaf // Leaf<T> newNode = new Leaf<>(oldFocus); // // // TODO: Copied from above. // Node<T>[] newNodes = replaceInArrayAt(newNode, nodes, subNodeIndex, Node.class); // // Increment endIndicies for the changed item and all items to the right. // int[] newEndIndices = new int[endIndices.length]; // if (subNodeIndex > 0) { // System.arraycopy(endIndices, 0, newEndIndices, 0, subNodeIndex - 1); // } // for (int i = subNodeIndex; i < endIndices.length; i++) { // newEndIndices[i] = endIndices[i] + oldFocus.length; // } // return new Relaxed<>(newEndIndices, newNodes); // // TODO: END Copied from above. // // } throw new UnsupportedOperationException("Not implemented yet"); } // Here we have capacity and it's not a leaf, so we have to split the appropriate sub-node. // int prevNodeMaxIdx = endIndices[(subNodeIndex > 0) ? subNodeIndex - 1 // : 0]; // int newIdx = index - prevNodeMaxIdx; // For now, split at half of maxIndex. 
// System.out.println("About to split: " + subNode); // System.out.println("Split at: " + (subNode.maxIndex() >> 1)); Tuple2<Node<T>,Node<T>> newSubNode = subNode.split(); Node<T> node1 = newSubNode._1(); Node<T> node2 = newSubNode._2(); // System.out.println("Split node1: " + node1); // System.out.println("Split node2: " + node2); Node<T>[] newNodes = (Node<T>[]) new Node[nodes.length + 1]; // If we aren't inserting at the first item, array-copy the nodes before the insert // point. if (subNodeIndex > 0) { System.arraycopy(nodes, 0, newNodes, 0, subNodeIndex); } // Insert the new item. newNodes[subNodeIndex] = node1; newNodes[subNodeIndex + 1] = node2; // If we aren't inserting at the last item, array-copy the nodes after the insert // point. if (subNodeIndex < nodes.length) { System.arraycopy(nodes, subNodeIndex + 1, newNodes, subNodeIndex + 2, nodes.length - subNodeIndex - 1); } int[] newEndIndices = new int[endIndices.length + 1]; int prevEndIdx = 0; if (subNodeIndex > 0) { System.arraycopy(endIndices, 0, newEndIndices, 0, subNodeIndex - 1); prevEndIdx = endIndices[subNodeIndex - 1]; } for (int i = subNodeIndex; i < newEndIndices.length; i++) { // TODO: Calculate instead of loading into memory. See splitAt calculation above. prevEndIdx += newNodes[i].maxIndex(); newEndIndices[i] = prevEndIdx; } Relaxed<T> newRelaxed = new Relaxed<>(newEndIndices, newNodes); // System.out.println("newRelaxed2: " + newRelaxed); return newRelaxed.pushFocus(index, oldFocus); // // // Regardless of what else happens, we're going to add a new node. // Node<T> newNode = new Leaf<>(oldFocus); // // // Make a skinny branch of a tree by walking up from the leaf node until our // // new branch is at the same level as the old one. We have to build evenly // // (like hotels in Monopoly) in order to keep the tree balanced. // int newHeight = 0; // // // If we've got space in our array, we just have to add skinny-branch nodes up to // // the level below ours. 
But if we don't have space, we have to add a // // single-element strict node at the same level as ours here too. // int maxHeight = (nodes.length < MAX_NODE_LENGTH) ? height : height + 1; // // // Make the skinny-branch of single-element strict nodes: // while (newHeight < maxHeight) { // // System.out.println(" Adding a skinny branch node..."); // Node<T>[] newNodes = (Node<T>[]) Array.newInstance(newNode.getClass(), 1); // newNodes[0] = newNode; // int[] newEndIndices = new int[] { oldFocus.length }; // newNode = new Relaxed<>(newHeight, newEndIndices, newNodes); // newHeight++; // } // // if ((nodes.length < RADIX_NODE_LENGTH)) { // // System.out.println(" Adding a node to the existing array"); // Node<T>[] newNodes = (Node<T>[]) insertIntoArrayAt(newNode, nodes, subNodeIndex, Node.class); // // This could allow cheap strict inserts on any leaf-node boundary... // return new Strict<>(shift, newNodes); // } else { // // System.out.println(" Adding a level to the Strict tree"); // return new Strict(shift + NODE_LENGTH_POW_2, // new Node[]{this, newNode}); // } // TODO: Not finished - working here! // System.out.println(" oldFocus.length: " + oldFocus.length); // System.out.println(" index: " + index); // System.out.println(" maxIndex(): " + maxIndex()); // System.out.println(" nodes.length: " + nodes.length); // System.out.println(" this: " + this); // // TODO: Implement // throw new UnsupportedOperationException("Not Implemented Yet"); } @Override public Node<T> replace(int idx, T t) { // TODO: Implement throw new UnsupportedOperationException("Not Implemented Yet"); } @Override public String toString() { return "Relaxed(endIndicies=" + Arrays.toString(endIndices) + " nodes=" + Arrays.toString(nodes).replaceAll(", Relaxed\\(", ",\n Relaxed(") + ")"; // return "Relaxed(nodes.length="+ nodes.length + ")"; } } }
Just cleaned up some old code and comments.
src/main/java/org/organicdesign/fp/experimental/RrbTree1.java
Just cleaned up some old code and comments.
<ide><path>rc/main/java/org/organicdesign/fp/experimental/RrbTree1.java <ide> T get(int i); <ide> /** Highest index returnable by this node */ <ide> int maxIndex(); <del> /** Inserts an item at the given index */ <del>// @Override public Node<T> insert(int i, T item); <del>// Node<T> append(T item); <ide> /** Returns true if this node's array is not full */ <ide> boolean thisNodeHasCapacity(); <ide> /** Returns true if this strict-Radix tree can take another 32 items. */ <ide> Leaf(T[] ts) { items = ts; } <ide> @Override public T get(int i) { return items[i]; } <ide> @Override public int maxIndex() { return items.length; } <del>// @Override public Node<T> append(T item) { <del>// T[] newItems = Arrays.copyOf(items, items.length + 1); <del>// newItems[items.length] = item; <del>// return new Leaf<>(newItems); <del>// } <ide> // If we want to add one more to an existing leaf node, it must already be part of a <ide> // relaxed tree. <ide> @Override public boolean thisNodeHasCapacity() { <ide> // return tup(this, right); <ide> // } <ide> <del>// @Override public Strict<T> append(T item) { <del>// Node<T> last = nodes[nodes.length - 1]; <del>// if (last.thisNodeHasCapacity()) { <del>// // Make a copy of our node array <del>// Node<T>[] newNodes = Arrays.copyOf(nodes, nodes.length); <del>// // Replace the last node with the updated one. <del>// newNodes[nodes.length - 1] = last.append(item); <del>// // Return new, updated node. <del>// return new Strict<>(shift, newNodes); <del>// } <del>// if (nodes.length >= RADIX_NODE_LENGTH) { <del>// throw new UnsupportedOperationException("This I think can only happen to the root node."); <del>// } else { <del>// // Make a larger copy of our node array <del>// Node<T>[] newNodes = Arrays.copyOf(nodes, nodes.length + 1); <del>// // Add a new node at the end of it. <del>// newNodes[nodes.length] = new Leaf<>(singleElementArray(item)); <del>// // Return new, updated node. 
<del>// return new Strict<>(shift, newNodes); <del>// } <del>// } <ide> @Override public String toString() { <ide> // return "Strict(nodes.length="+ nodes.length + ", shift=" + shift + ")"; <ide> return "Strict" + shift + Arrays.toString(nodes); <ide> return tup(left, right); <ide> } <ide> <del>// @Override public Node<T> append(T item) { <del>// Node<T> last = nodes[nodes.length - 1]; <del>// if (last.thisNodeHasCapacity()) { <del>// // Make a copy of our node array <del>// Node<T>[] newNodes = Arrays.copyOf(nodes, nodes.length); <del>// // Replace the last node with the updated one. <del>// newNodes[nodes.length - 1] = last.append(item); <del>// // Return new, updated node. <del>// return new Relaxed<>(endIndices, newNodes); <del>// } <del>// if (nodes.length >= MAX_NODE_LENGTH) { <del>// throw new UnsupportedOperationException("This I think can only happen to the root node."); <del>// } else { <del>// // Make a larger copy of our node array <del>// Node<T>[] newNodes = Arrays.copyOf(nodes, nodes.length + 1); <del>// // Split the last node into two. (Shift-right one is the same as dividing by 2.) <del>// Tuple2<? extends Node<T>,? extends Node<T>> splitNodes = last.split(); <del>// // Put the left split node where the old node was <del>// newNodes[nodes.length - 1] = splitNodes._1(); <del>// // Append the item to the right node and add that at the new end position. <del>// newNodes[nodes.length] = splitNodes._2().append(item); <del>// // Return new, updated node. 
<del>// return new Relaxed<>(endIndices, newNodes); <del>// } <del>// } <del> <ide> @Override public boolean thisNodeHasCapacity() { <ide> // System.out.println("thisNodeHasCapacity(): nodes.length=" + nodes.length + " MAX_NODE_LENGTH=" + MAX_NODE_LENGTH + " MIN_NODE_LENGTH=" + MIN_NODE_LENGTH + " RADIX_NODE_LENGTH=" + RADIX_NODE_LENGTH); <ide> return nodes.length < MAX_NODE_LENGTH; <ide> } <ide> <ide> @Override public Node<T> pushFocus(int index, T[] oldFocus) { <del>// System.out.println("Relaxed pushFocus(" + Arrays.toString(oldFocus) + ", " + index + ")"); <add>// System.out.println("Relaxed pushFocus(" + index + ", " + Arrays.toString(oldFocus) + ")"); <ide> // System.out.println(" this: " + this); <ide> <ide> int subNodeIndex = subNodeIndex(index); <ide> // <ide> // } <ide> <add> // TODO: Implement! <ide> throw new UnsupportedOperationException("Not implemented yet"); <del> } <add> } // end if subNode instanceof Leaf <ide> <ide> // Here we have capacity and it's not a leaf, so we have to split the appropriate sub-node. <ide> <ide> return "Relaxed(endIndicies=" + Arrays.toString(endIndices) + " nodes=" + Arrays.toString(nodes).replaceAll(", Relaxed\\(", ",\n Relaxed(") + ")"; <ide> // return "Relaxed(nodes.length="+ nodes.length + ")"; <ide> } <del> } <del>} <add> } // end class Relaxed <add>} // end class RrbTree
JavaScript
bsd-3-clause
d7c54056ae34690346280f126bed6f23287fc33f
0
UndefinedOffset/silverstripe-keyboardshortcuts,UndefinedOffset/silverstripe-keyboardshortcuts
(function() { //TinyMCE will stop loading if it encounters non-existent external script file when included through tiny_mce_gzip.php. Only load the external lang package if it is available. var availableLangs=['en', 'de']; if(jQuery.inArray(tinymce.settings.language, availableLangs)!=-1) { tinymce.PluginManager.requireLangPack('sskeyboardshortcuts'); } tinymce.create('tinymce.plugins.ssKeyboardShortcuts', { init: function(ed, url) { //ed.addShortcut('ctrl+shift+k', ed.getLang('sskeyboardshortcuts.insertlink', 0), 'sslink'); //Insert a link ed.addShortcut('alt+shift+k', ed.getLang('sskeyboardshortcuts.unlink', 0), 'unlink'); //Unlink ed.addShortcut('ctrl+shift+m', ed.getLang('sskeyboardshortcuts.insertmedia', 0), 'ssmedia'); //Insert Media ed.addShortcut('ctrl+shift+l', ed.getLang('sskeyboardshortcuts.insertbullets', 0), 'InsertUnorderedList'); //Insert or Remove Bulleted List ed.addShortcut('ctrl+l', ed.getLang('sskeyboardshortcuts.insertnumbers', 0), 'InsertOrderedList'); //Insert or Remove Numbered List }, getInfo: function() { return { longname: ed.getLang('sskeyboardshortcuts.longdesc', 0), author: 'UndefinedOffset', authorurl: 'http://www.edchipman.ca', infourl: 'https://github.com/UndefinedOffset/silverstripe-keyboardshortcuts', version: "1.0" }; } }); // Register plugin tinymce.PluginManager.add('sskeyboardshortcuts', tinymce.plugins.ssKeyboardShortcuts); })();
javascript/tinymce/editor_plugin_src.js
(function() { //TinyMCE will stop loading if it encounters non-existent external script file when included through tiny_mce_gzip.php. Only load the external lang package if it is available. var availableLangs=['en', 'de']; if(jQuery.inArray(tinymce.settings.language, availableLangs)!=-1) { tinymce.PluginManager.requireLangPack('sskeyboardshortcuts'); } tinymce.create('tinymce.plugins.ssKeyboardShortcuts', { init: function(ed, url) { //ed.addShortcut('ctrl+shift+k', ed.getLang('sskeyboardshortcuts.insertlink', 0), 'sslink'); //Insert a link ed.addShortcut('alt+shift+k', ed.getLang('sskeyboardshortcuts.unlink', 0), 'unlink'); //Unlink ed.addShortcut('ctrl+shift+m', ed.getLang('sskeyboardshortcuts.insertmedia', 0), 'ssmedia'); //Insert Media ed.addShortcut('ctrl+shift+l', ed.getLang('sskeyboardshortcuts.insertbullets', 0), 'InsertUnorderedList'); //Insert or Remove Bulleted List ed.addShortcut('ctrl+l', ed.getLang('sskeyboardshortcuts.insertnumbers', 0), 'InsertOrderedList'); //Insert or Remove Numbered List //Bind the mousetrap events ed.once('init', function() { jQuery(ed.getElement()).entwine('ss').editorinit(); }); }, getInfo: function() { return { longname: ed.getLang('sskeyboardshortcuts.longdesc', 0), author: 'UndefinedOffset', authorurl: 'http://www.edchipman.ca', infourl: 'https://github.com/UndefinedOffset/silverstripe-keyboardshortcuts', version: "1.0" }; } }); // Register plugin tinymce.PluginManager.add('sskeyboardshortcuts', tinymce.plugins.ssKeyboardShortcuts); })();
Fixed error when loading the editor plugin
javascript/tinymce/editor_plugin_src.js
Fixed error when loading the editor plugin
<ide><path>avascript/tinymce/editor_plugin_src.js <ide> ed.addShortcut('ctrl+shift+l', ed.getLang('sskeyboardshortcuts.insertbullets', 0), 'InsertUnorderedList'); //Insert or Remove Bulleted List <ide> <ide> ed.addShortcut('ctrl+l', ed.getLang('sskeyboardshortcuts.insertnumbers', 0), 'InsertOrderedList'); //Insert or Remove Numbered List <del> <del> //Bind the mousetrap events <del> ed.once('init', function() { <del> jQuery(ed.getElement()).entwine('ss').editorinit(); <del> }); <ide> }, <ide> getInfo: function() { <ide> return {
Java
epl-1.0
cb6fd7676c31ccae1f13077a48bc2e36e67089b4
0
Johnson-Chou/test,my76128/controller,mandeepdhami/controller,tx1103mark/controller,aryantaheri/monitoring-controller,mandeepdhami/controller,aryantaheri/controller,inocybe/odl-controller,mandeepdhami/controller,aryantaheri/controller,aryantaheri/monitoring-controller,violinlakshmi/opendaylight,opendaylight/controller,mandeepdhami/controller,aryantaheri/monitoring-controller,aryantaheri/monitoring-controller,violinlakshmi/opendaylight,my76128/controller,tx1103mark/controller,tx1103mark/controller,aryantaheri/controller,tx1103mark/controller,violinlakshmi/opendaylight,inocybe/odl-controller,Sushma7785/OpenDayLight-Load-Balancer,my76128/controller,my76128/controller,522986491/controller,Sushma7785/OpenDayLight-Load-Balancer,522986491/controller,Johnson-Chou/test
/* * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 which accompanies this distribution, * and is available at http://www.eclipse.org/legal/epl-v10.html */ package org.opendaylight.controller.forwardingrulesmanager.internal; import java.io.FileNotFoundException; import java.io.IOException; import java.io.ObjectInputStream; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; import org.eclipse.osgi.framework.console.CommandInterpreter; import org.eclipse.osgi.framework.console.CommandProvider; import org.opendaylight.controller.clustering.services.CacheConfigException; import org.opendaylight.controller.clustering.services.CacheExistException; import org.opendaylight.controller.clustering.services.ICacheUpdateAware; import org.opendaylight.controller.clustering.services.IClusterContainerServices; import org.opendaylight.controller.clustering.services.IClusterServices; import org.opendaylight.controller.configuration.IConfigurationContainerAware; import org.opendaylight.controller.connectionmanager.IConnectionManager; import org.opendaylight.controller.forwardingrulesmanager.FlowConfig; import org.opendaylight.controller.forwardingrulesmanager.FlowEntry; import org.opendaylight.controller.forwardingrulesmanager.FlowEntryInstall; 
import org.opendaylight.controller.forwardingrulesmanager.IForwardingRulesManager; import org.opendaylight.controller.forwardingrulesmanager.IForwardingRulesManagerAware; import org.opendaylight.controller.forwardingrulesmanager.PortGroup; import org.opendaylight.controller.forwardingrulesmanager.PortGroupChangeListener; import org.opendaylight.controller.forwardingrulesmanager.PortGroupConfig; import org.opendaylight.controller.forwardingrulesmanager.PortGroupProvider; import org.opendaylight.controller.forwardingrulesmanager.implementation.data.FlowEntryDistributionOrder; import org.opendaylight.controller.sal.action.Action; import org.opendaylight.controller.sal.action.ActionType; import org.opendaylight.controller.sal.action.Controller; import org.opendaylight.controller.sal.action.Flood; import org.opendaylight.controller.sal.action.Output; import org.opendaylight.controller.sal.action.PopVlan; import org.opendaylight.controller.sal.core.ContainerFlow; import org.opendaylight.controller.sal.core.IContainer; import org.opendaylight.controller.sal.core.IContainerListener; import org.opendaylight.controller.sal.core.Node; import org.opendaylight.controller.sal.core.NodeConnector; import org.opendaylight.controller.sal.core.Property; import org.opendaylight.controller.sal.core.UpdateType; import org.opendaylight.controller.sal.flowprogrammer.Flow; import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerListener; import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerService; import org.opendaylight.controller.sal.match.Match; import org.opendaylight.controller.sal.match.MatchType; import org.opendaylight.controller.sal.utils.EtherTypes; import org.opendaylight.controller.sal.utils.GlobalConstants; import org.opendaylight.controller.sal.utils.HexEncode; import org.opendaylight.controller.sal.utils.IObjectReader; import org.opendaylight.controller.sal.utils.IPProtocols; import org.opendaylight.controller.sal.utils.NodeConnectorCreator; 
import org.opendaylight.controller.sal.utils.NodeCreator;
import org.opendaylight.controller.sal.utils.ObjectReader;
import org.opendaylight.controller.sal.utils.ObjectWriter;
import org.opendaylight.controller.sal.utils.Status;
import org.opendaylight.controller.sal.utils.StatusCode;
import org.opendaylight.controller.switchmanager.IInventoryListener;
import org.opendaylight.controller.switchmanager.ISwitchManager;
import org.opendaylight.controller.switchmanager.ISwitchManagerAware;
import org.opendaylight.controller.switchmanager.Subnet;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Class that manages forwarding rule installation and removal per container of
 * the network. It also maintains the central repository of all the forwarding
 * rules installed on the network nodes.
 */
public class ForwardingRulesManager implements IForwardingRulesManager, PortGroupChangeListener, IContainerListener,
        ISwitchManagerAware, IConfigurationContainerAware, IInventoryListener, IObjectReader, ICacheUpdateAware,
        CommandProvider, IFlowProgrammerListener {
    private static final String NODEDOWN = "Node is Down";
    private static final String SUCCESS = StatusCode.SUCCESS.toString();
    private static final Logger log = LoggerFactory.getLogger(ForwardingRulesManager.class);
    private static final String PORTREMOVED = "Port removed";
    // Dedicated logger for the cluster-synchronization (work distribution) path
    private static final Logger logsync = LoggerFactory.getLogger("FRMsync");
    // File names used to persist the static flow and port group configurations
    private String frmFileName;
    private String portGroupFileName;
    private ConcurrentMap<Integer, FlowConfig> staticFlows;
    private ConcurrentMap<Integer, Integer> staticFlowsOrdinal;
    private ConcurrentMap<String, PortGroupConfig> portGroupConfigs;
    private ConcurrentMap<PortGroupConfig, Map<Node, PortGroup>> portGroupData;
    private ConcurrentMap<String, Object> TSPolicies;
    private boolean inContainerMode; // being used by global instance only
    private boolean stopping;

    /*
     * Flow database. It's the software view of what was requested to install
     * and what is installed on the switch. It is indexed by the entry itself.
     * The entry's hashcode resumes the network node index, the flow's priority
     * and the flow's match. The value element is a class which contains the
     * flow entry pushed by the applications modules and the respective
     * container flow merged version. In absence of container flows, the two
     * flow entries are the same.
     */
    private ConcurrentMap<FlowEntry, FlowEntry> originalSwView;
    private ConcurrentMap<FlowEntryInstall, FlowEntryInstall> installedSwView;
    /*
     * Per node and per group indexing
     */
    private ConcurrentMap<Node, List<FlowEntryInstall>> nodeFlows;
    private ConcurrentMap<String, List<FlowEntryInstall>> groupFlows;

    /*
     * Inactive flow list. This is for the global instance of FRM It will
     * contain all the flow entries which were installed on the global container
     * when the first container is created.
     */
    private ConcurrentMap<FlowEntry, FlowEntry> inactiveFlows;

    private IContainer container;
    private Set<IForwardingRulesManagerAware> frmAware =
        Collections.synchronizedSet(new HashSet<IForwardingRulesManagerAware>());
    private PortGroupProvider portGroupProvider;
    private IFlowProgrammerService programmer;
    private IClusterContainerServices clusterContainerService = null;
    private ISwitchManager switchManager;
    private Thread frmEventHandler;
    protected BlockingQueue<FRMEvent> pendingEvents;

    // Distributes FRM programming in the cluster
    private IConnectionManager connectionManager;

    /*
     * Name clustered caches used to support FRM entry distribution these are by
     * necessity non-transactional as long as need to be able to synchronize
     * states also while a transaction is in progress
     */
    static final String WORKORDERCACHE = "frm.workOrder";
    static final String WORKSTATUSCACHE = "frm.workStatus";

    /*
     * Data structure responsible for distributing the FlowEntryInstall requests
     * in the cluster. The key value is entry that is being either Installed or
     * Updated or Delete. The value field is the same of the key value in case
     * of Installation or Deletion, it's the new entry in case of Modification,
     * this because the clustering caches don't allow null values.
     *
     * The logic behind this data structure is that the controller that initiate
     * the request will place the order here, someone will pick it and then will
     * remove from this data structure because is being served.
     *
     * TODO: We need to have a way to cleanup this data structure if entries are
     * not picked by anyone, which is always a case can happen especially on
     * Node disconnect cases.
     */
    private ConcurrentMap<FlowEntryDistributionOrder, FlowEntryInstall> workOrder;

    /*
     * Data structure responsible for retrieving the results of the workOrder
     * submitted to the cluster.
     *
     * The logic behind this data structure is that the controller that has
     * executed the order will then place the result in workStatus signaling
     * that there was a success or a failure.
     *
     * TODO: The workStatus entries need to have a lifetime associated in case
     * of requestor controller leaving the cluster.
     */
    private ConcurrentMap<FlowEntryDistributionOrder, Status> workStatus;

    /*
     * Local Map used to hold the Future which a caller can use to monitor for
     * completion
     */
    private ConcurrentMap<FlowEntryDistributionOrder, FlowEntryDistributionOrderFutureTask> workMonitor =
            new ConcurrentHashMap<FlowEntryDistributionOrder, FlowEntryDistributionOrderFutureTask>();

    /*
     * Create an executor pool to create the distributionOrder, this is a stop
     * gap solution caused by an issue with non-transactional caches in the
     * implementation we use, being currently worked on. It has been noticed in
     * fact that when non-transactional caches are being used sometime the key
     * are no distributed to all the nodes properly. To workaround the issue
     * transactional caches are being used, but there was a reason for using
     * non-transactional caches to start with, in fact we needed to be able in
     * the context of a northbound transaction to program the FRM entries
     * irrespective of the fact that transaction would commit or no else we
     * would not be able to achieve the entry programming and implement the
     * scheme for recovery from network element failures. Bottom line, now in
     * order to make sure an update on a transactional cache goes out while in a
     * transaction that need to be initiated by a different thread.
     */
    private ExecutorService executor;

    /**
     * Callable that publishes a FlowEntryInstall work order into the clustered
     * workOrder cache so that the controller owning the target node executes
     * it. Returns a Future the caller can use to wait for the remote result.
     */
    class DistributeOrderCallable implements Callable<Future<Status>> {
        private FlowEntryInstall e;
        private FlowEntryInstall u;
        private UpdateType t;

        DistributeOrderCallable(FlowEntryInstall e, FlowEntryInstall u, UpdateType t) {
            this.e = e;
            this.u = u;
            this.t = t;
        }

        @Override
        public Future<Status> call() throws Exception {
            if (e == null || t == null) {
                logsync.error("Unexpected null Entry up update type");
                return null;
            }
            // Create the work order and distribute it
            FlowEntryDistributionOrder fe =
                    new FlowEntryDistributionOrder(e, t, clusterContainerService.getMyAddress());
            // First create the monitor job, so it is in place before the
            // order becomes visible to the rest of the cluster
            FlowEntryDistributionOrderFutureTask ret = new FlowEntryDistributionOrderFutureTask(fe);
            logsync.trace("Node {} not local so sending fe {}", e.getNode(), fe);
            workMonitor.put(fe, ret);
            if (t.equals(UpdateType.CHANGED)) {
                // Then distribute the work; for a modification the cache value
                // carries the new entry (caches cannot store nulls)
                workOrder.put(fe, u);
            } else {
                // Then distribute the work
                workOrder.put(fe, e);
            }
            logsync.trace("WorkOrder requested");
            // Now create an Handle to monitor the execution of the operation
            return ret;
        }
    }

    /**
     * @param e
     *            Entry being installed/updated/removed
     * @param u
     *            New entry will be placed after the update operation.
Valid * only for UpdateType.CHANGED, null for all the other cases * @param t * Type of update * @return a Future object for monitoring the progress of the result, or * null in case the processing should take place locally */ private Future<Status> distributeWorkOrder(FlowEntryInstall e, FlowEntryInstall u, UpdateType t) { // A null entry it's an unexpected condition, anyway it's safe to keep // the handling local if (e == null) { return null; } Node n = e.getNode(); if (!connectionManager.isLocal(n)) { Callable<Future<Status>> worker = new DistributeOrderCallable(e, u, t); if (worker != null) { Future<Future<Status>> workerRes = this.executor.submit(worker); try { return workerRes.get(); } catch (InterruptedException e1) { // we where interrupted, not a big deal. return null; } catch (ExecutionException e1) { logsync.error( "We got an execution exception {} we cannot much, so returning we don't have nothing to wait for", e); return null; } } } logsync.trace("LOCAL Node {} so processing Entry:{} UpdateType:{}", n, e, t); return null; } /** * Adds a flow entry onto the network node It runs various validity checks * and derive the final container flows merged entries that will be * attempted to be installed * * @param flowEntry * the original flow entry application requested to add * @param async * the flag indicating if this is a asynchronous request * @return the status of this request. In case of asynchronous call, it will * contain the unique id assigned to this request */ private Status addEntry(FlowEntry flowEntry, boolean async) { // Sanity Check if (flowEntry == null || flowEntry.getNode() == null) { String msg = "Invalid FlowEntry"; String logMsg = msg + ": {}"; log.warn(logMsg, flowEntry); return new Status(StatusCode.NOTACCEPTABLE, msg); } /* * Derive the container flow merged entries to install In presence of N * container flows, we may end up with N different entries to install... 
*/ List<FlowEntryInstall> toInstallList = deriveInstallEntries(flowEntry.clone(), container.getContainerFlows()); // Container Flow conflict Check if (toInstallList.isEmpty()) { String msg = "Flow Entry conflicts with all Container Flows"; String logMsg = msg + ": {}"; log.warn(logMsg, flowEntry); return new Status(StatusCode.CONFLICT, msg); } // Derive the list of entries good to be installed List<FlowEntryInstall> toInstallSafe = new ArrayList<FlowEntryInstall>(); for (FlowEntryInstall entry : toInstallList) { // Conflict Check: Verify new entry would not overwrite existing // ones if (this.installedSwView.containsKey(entry)) { log.warn("Operation Rejected: A flow with same match and priority exists on the target node"); log.trace("Aborting to install {}", entry); continue; } toInstallSafe.add(entry); } // Declare failure if all the container flow merged entries clash with // existing entries if (toInstallSafe.size() == 0) { String msg = "A flow with same match and priority exists on the target node"; String logMsg = msg + ": {}"; log.warn(logMsg, flowEntry); return new Status(StatusCode.CONFLICT, msg); } // Try to install an entry at the time Status error = new Status(null, null); Status succeded = null; boolean oneSucceded = false; for (FlowEntryInstall installEntry : toInstallSafe) { // Install and update database Status ret = addEntriesInternal(installEntry, async); if (ret.isSuccess()) { oneSucceded = true; /* * The first successful status response will be returned For the * asynchronous call, we can discard the container flow * complication for now and assume we will always deal with one * flow only per request */ succeded = ret; } else { error = ret; log.warn("Failed to install the entry: {}. The failure is: {}", installEntry, ret.getDescription()); } } return (oneSucceded) ? succeded : error; } /** * Given a flow entry and the list of container flows, it returns the list * of container flow merged flow entries good to be installed on this * container. 
If the list of container flows is null or empty, the install * entry list will contain only one entry, the original flow entry. If the * flow entry is congruent with all the N container flows, then the output * install entry list will contain N entries. If the output list is empty, * it means the passed flow entry conflicts with all the container flows. * * @param cFlowList * The list of container flows * @return the list of container flow merged entries good to be installed on * this container */ private List<FlowEntryInstall> deriveInstallEntries(FlowEntry request, List<ContainerFlow> cFlowList) { List<FlowEntryInstall> toInstallList = new ArrayList<FlowEntryInstall>(1); if (container.getContainerFlows() == null || container.getContainerFlows().isEmpty()) { // No container flows => entry good to be installed unchanged toInstallList.add(new FlowEntryInstall(request.clone(), null)); } else { // Create the list of entries to be installed. If the flow entry is // not congruent with any container flow, no install entries will be // created for (ContainerFlow cFlow : container.getContainerFlows()) { if (cFlow.allowsFlow(request.getFlow())) { toInstallList.add(new FlowEntryInstall(request.clone(), cFlow)); } } } return toInstallList; } /** * Modify a flow entry with a new one It runs various validity check and * derive the final container flows merged flow entries to work with * * @param currentFlowEntry * @param newFlowEntry * @param async * the flag indicating if this is a asynchronous request * @return the status of this request. 
In case of asynchronous call, it will * contain the unique id assigned to this request */ private Status modifyEntry(FlowEntry currentFlowEntry, FlowEntry newFlowEntry, boolean async) { Status retExt; // Sanity checks if (currentFlowEntry == null || currentFlowEntry.getNode() == null || newFlowEntry == null || newFlowEntry.getNode() == null) { String msg = "Modify: Invalid FlowEntry"; String logMsg = msg + ": {} or {}"; log.warn(logMsg, currentFlowEntry, newFlowEntry); return new Status(StatusCode.NOTACCEPTABLE, msg); } if (!currentFlowEntry.getNode().equals(newFlowEntry.getNode()) || !currentFlowEntry.getFlowName().equals(newFlowEntry.getFlowName())) { String msg = "Modify: Incompatible Flow Entries"; String logMsg = msg + ": {} and {}"; log.warn(logMsg, currentFlowEntry, newFlowEntry); return new Status(StatusCode.NOTACCEPTABLE, msg); } // Equality Check if (currentFlowEntry.getFlow().equals(newFlowEntry.getFlow())) { String msg = "Modify skipped as flows are the same"; String logMsg = msg + ": {} and {}"; log.debug(logMsg, currentFlowEntry, newFlowEntry); return new Status(StatusCode.SUCCESS, msg); } /* * Conflict Check: Verify the new entry would not conflict with an * existing one. This is a loose check on the previous original flow * entry requests. 
No check on the container flow merged flow entries * (if any) yet */ FlowEntry sameMatchOriginalEntry = originalSwView.get(newFlowEntry); if (sameMatchOriginalEntry != null && !sameMatchOriginalEntry.equals(currentFlowEntry)) { String msg = "Operation Rejected: Another flow with same match and priority exists on the target node"; String logMsg = msg + ": {}"; log.warn(logMsg, currentFlowEntry); return new Status(StatusCode.CONFLICT, msg); } // Derive the installed and toInstall entries List<FlowEntryInstall> installedList = deriveInstallEntries(currentFlowEntry.clone(), container.getContainerFlows()); List<FlowEntryInstall> toInstallList = deriveInstallEntries(newFlowEntry.clone(), container.getContainerFlows()); if (toInstallList.isEmpty()) { String msg = "Modify Operation Rejected: The new entry conflicts with all the container flows"; String logMsg = msg + ": {}"; log.warn(logMsg, newFlowEntry); log.warn(msg); return new Status(StatusCode.CONFLICT, msg); } /* * If the two list sizes differ, it means the new flow entry does not * satisfy the same number of container flows the current entry does. * This is only possible when the new entry and current entry have * different match. In this scenario the modification would ultimately * be handled as a remove and add operations in the protocol plugin. * * Also, if any of the new flow entries would clash with an existing * one, we cannot proceed with the modify operation, because it would * fail for some entries and leave stale entries on the network node. * Modify path can be taken only if it can be performed completely, for * all entries. 
* * So, for the above two cases, to simplify, let's decouple the modify * in: 1) remove current entries 2) install new entries */ Status succeeded = null; boolean decouple = false; if (installedList.size() != toInstallList.size()) { log.info("Modify: New flow entry does not satisfy the same " + "number of container flows as the original entry does"); decouple = true; } List<FlowEntryInstall> toInstallSafe = new ArrayList<FlowEntryInstall>(); for (FlowEntryInstall installEntry : toInstallList) { /* * Conflict Check: Verify the new entry would not overwrite another * existing one */ FlowEntryInstall sameMatchEntry = installedSwView.get(installEntry); if (sameMatchEntry != null && !sameMatchEntry.getOriginal().equals(currentFlowEntry)) { log.info("Modify: new container flow merged flow entry clashes with existing flow"); decouple = true; } else { toInstallSafe.add(installEntry); } } if (decouple) { // Remove current entries for (FlowEntryInstall currEntry : installedList) { this.removeEntryInternal(currEntry, async); } // Install new entries for (FlowEntryInstall newEntry : toInstallSafe) { succeeded = this.addEntriesInternal(newEntry, async); } } else { /* * The two list have the same size and the entries to install do not * clash with any existing flow on the network node. We assume here * (and might be wrong) that the same container flows that were * satisfied by the current entries are the same that are satisfied * by the new entries. Let's take the risk for now. * * Note: modification has to be complete. If any entry modification * fails, we need to stop, restore the already modified entries, and * declare failure. 
*/ Status retModify = null; int i = 0; int size = toInstallList.size(); while (i < size) { // Modify and update database retModify = modifyEntryInternal(installedList.get(i), toInstallList.get(i), async); if (retModify.isSuccess()) { i++; } else { break; } } // Check if uncompleted modify if (i < size) { log.warn("Unable to perform a complete modify for all the container flows merged entries"); // Restore original entries int j = 0; while (j < i) { log.info("Attempting to restore initial entries"); retExt = modifyEntryInternal(toInstallList.get(i), installedList.get(i), async); if (retExt.isSuccess()) { j++; } else { break; } } // Fatal error, recovery failed if (j < i) { String msg = "Flow recovery failed ! Unrecoverable Error"; log.error(msg); return new Status(StatusCode.INTERNALERROR, msg); } } succeeded = retModify; } /* * The first successful status response will be returned. For the * asynchronous call, we can discard the container flow complication for * now and assume we will always deal with one flow only per request */ return succeeded; } /** * This is the function that modifies the final container flows merged * entries on the network node and update the database. It expects that all * the validity checks are passed * * @param currentEntries * @param newEntries * @param async * the flag indicating if this is a asynchronous request * @return the status of this request. 
In case of asynchronous call, it will
     *         contain the unique id assigned to this request
     */
    private Status modifyEntryInternal(FlowEntryInstall currentEntries, FlowEntryInstall newEntries, boolean async) {
        // If the target node is owned by another cluster member, ship the
        // order there and wait for its status; a null Future means "local"
        Future<Status> futureStatus = distributeWorkOrder(currentEntries, newEntries, UpdateType.CHANGED);
        if (futureStatus != null) {
            Status retStatus = new Status(StatusCode.UNDEFINED);
            try {
                retStatus = futureStatus.get();
            } catch (InterruptedException e) {
                log.error("", e);
            } catch (ExecutionException e) {
                log.error("", e);
            }
            return retStatus;
        } else {
            // Modify the flow on the network node
            Status status = async ? programmer.modifyFlowAsync(currentEntries.getNode(), currentEntries.getInstall()
                    .getFlow(), newEntries.getInstall()
                    .getFlow()) : programmer.modifyFlow(currentEntries.getNode(), currentEntries.getInstall()
                    .getFlow(), newEntries.getInstall()
                    .getFlow());

            if (!status.isSuccess()) {
                log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", newEntries.getInstall(),
                        status.getDescription());
                return status;
            }

            log.trace("Modified {} => {}", currentEntries.getInstall(), newEntries.getInstall());

            // Update DB: remove the old entry first, then add the new one
            newEntries.setRequestId(status.getRequestId());
            updateLocalDatabase(currentEntries, false);
            updateLocalDatabase(newEntries, true);

            return status;
        }
    }

    /**
     * Remove a flow entry. If the entry is not present in the software view
     * (entry or node not present), it returns successfully
     *
     * @param flowEntry
     *            the flow entry to remove
     * @param async
     *            the flag indicating if this is an asynchronous request
     * @return the status of this request. In case of asynchronous call, it will
     *         contain the unique id assigned to this request
     */
    private Status removeEntry(FlowEntry flowEntry, boolean async) {
        Status error = new Status(null, null);

        // Sanity Check
        if (flowEntry == null || flowEntry.getNode() == null) {
            String msg = "Invalid FlowEntry";
            String logMsg = msg + ": {}";
            log.warn(logMsg, flowEntry);
            return new Status(StatusCode.NOTACCEPTABLE, msg);
        }

        // Derive the container flows merged installed entries
        List<FlowEntryInstall> installedList = deriveInstallEntries(flowEntry.clone(), container.getContainerFlows());

        Status succeeded = null;
        boolean atLeastOneRemoved = false;
        for (FlowEntryInstall entry : installedList) {
            if (!installedSwView.containsKey(entry)) {
                String logMsg = "Removal skipped (not present in software view) for flow entry: {}";
                log.debug(logMsg, flowEntry);
                if (installedList.size() == 1) {
                    // If we had only one entry to remove, we are done
                    return new Status(StatusCode.SUCCESS);
                } else {
                    continue;
                }
            }

            // Remove and update DB
            Status ret = removeEntryInternal(entry, async);

            if (!ret.isSuccess()) {
                error = ret;
                log.warn("Failed to remove the entry: {}. The failure is: {}", entry.getInstall(), ret.getDescription());
                if (installedList.size() == 1) {
                    // If we had only one entry to remove, this is fatal failure
                    return error;
                }
            } else {
                succeeded = ret;
                atLeastOneRemoved = true;
            }
        }

        /*
         * No worries if full removal failed. Consistency checker will take care
         * of removing the stale entries later, or adjusting the software
         * database if not in sync with hardware
         */
        return (atLeastOneRemoved) ? succeeded : error;
    }

    /**
     * This is the function that removes the final container flows merged entry
     * from the network node and update the database. It expects that all the
     * validity checks are passed
     *
     * @param entry
     *            the flow entry to remove
     * @param async
     *            the flag indicating if this is an asynchronous request
     * @return the status of this request. In case of asynchronous call, it will
     *         contain the unique id assigned to this request
     */
    private Status removeEntryInternal(FlowEntryInstall entry, boolean async) {
        // Distribute to the owning controller if the node is not local
        Future<Status> futureStatus = distributeWorkOrder(entry, null, UpdateType.REMOVED);
        if (futureStatus != null) {
            Status retStatus = new Status(StatusCode.UNDEFINED);
            try {
                retStatus = futureStatus.get();
            } catch (InterruptedException e) {
                log.error("", e);
            } catch (ExecutionException e) {
                log.error("", e);
            }
            return retStatus;
        } else {
            // Mark the entry to be deleted (for CC just in case we fail)
            entry.toBeDeleted();

            // Remove from node
            Status status = async ? programmer.removeFlowAsync(entry.getNode(), entry.getInstall()
                    .getFlow()) : programmer.removeFlow(entry.getNode(), entry.getInstall()
                    .getFlow());

            if (!status.isSuccess()) {
                log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", entry.getInstall(),
                        status.getDescription());
                return status;
            }
            log.trace("Removed  {}", entry.getInstall());

            // Update DB
            updateLocalDatabase(entry, false);

            return status;
        }
    }

    /**
     * This is the function that installs the final container flow merged entry
     * on the network node and updates the database. It expects that all the
     * validity and conflict checks are passed. That means it does not check
     * whether this flow would conflict or overwrite an existing one.
     *
     * @param entry
     *            the flow entry to install
     * @param async
     *            the flag indicating if this is an asynchronous request
     * @return the status of this request. In case of asynchronous call, it will
     *         contain the unique id assigned to this request
     */
    private Status addEntriesInternal(FlowEntryInstall entry, boolean async) {
        // Distribute to the owning controller if the node is not local
        Future<Status> futureStatus = distributeWorkOrder(entry, null, UpdateType.ADDED);
        if (futureStatus != null) {
            Status retStatus = new Status(StatusCode.UNDEFINED);
            try {
                retStatus = futureStatus.get();
            } catch (InterruptedException e) {
                log.error("", e);
            } catch (ExecutionException e) {
                log.error("", e);
            }
            return retStatus;
        } else {
            // Install the flow on the network node
            Status status = async ? programmer.addFlowAsync(entry.getNode(), entry.getInstall()
                    .getFlow()) : programmer.addFlow(entry.getNode(), entry.getInstall()
                    .getFlow());

            if (!status.isSuccess()) {
                log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", entry.getInstall(),
                        status.getDescription());
                return status;
            }

            log.trace("Added  {}", entry.getInstall());

            // Update DB
            entry.setRequestId(status.getRequestId());
            updateLocalDatabase(entry, true);

            return status;
        }
    }

    /**
     * Returns true if the flow conflicts with all the container's flows. This
     * means that if the function returns false, the passed flow entry is
     * congruent with at least one container flow, hence it is good to be
     * installed on this container.
* * @param flowEntry * @return true if flow conflicts with all the container flows, false * otherwise */ private boolean entryConflictsWithContainerFlows(FlowEntry flowEntry) { List<ContainerFlow> cFlowList = container.getContainerFlows(); // Validity check and avoid unnecessary computation // Also takes care of default container where no container flows are // present if (cFlowList == null || cFlowList.isEmpty()) { return false; } for (ContainerFlow cFlow : cFlowList) { if (cFlow.allowsFlow(flowEntry.getFlow())) { // Entry is allowed by at least one container flow: good to go return false; } } return true; } private void updateLocalDatabase(FlowEntryInstall entry, boolean add) { // Update the software view updateSwViewes(entry, add); // Update node indexed flow database updateNodeFlowsDB(entry, add); // Update group indexed flow database updateGroupFlowsDB(entry, add); } /* * Update the node mapped flows database */ private void updateSwViewes(FlowEntryInstall flowEntries, boolean add) { if (add) { originalSwView.put(flowEntries.getOriginal(), flowEntries.getOriginal()); installedSwView.put(flowEntries, flowEntries); } else { originalSwView.remove(flowEntries.getOriginal()); installedSwView.remove(flowEntries); } } /* * Update the node mapped flows database */ private void updateNodeFlowsDB(FlowEntryInstall flowEntries, boolean add) { Node node = flowEntries.getNode(); List<FlowEntryInstall> nodeIndeces = this.nodeFlows.get(node); if (nodeIndeces == null) { if (!add) { return; } else { nodeIndeces = new ArrayList<FlowEntryInstall>(); } } if (add) { nodeIndeces.add(flowEntries); } else { nodeIndeces.remove(flowEntries); } // Update cache across cluster if (nodeIndeces.isEmpty()) { this.nodeFlows.remove(node); } else { this.nodeFlows.put(node, nodeIndeces); } } /* * Update the group name mapped flows database */ private void updateGroupFlowsDB(FlowEntryInstall flowEntries, boolean add) { String groupName = flowEntries.getGroupName(); // Flow may not be part of a 
group if (groupName == null) { return; } List<FlowEntryInstall> indices = this.groupFlows.get(groupName); if (indices == null) { if (!add) { return; } else { indices = new ArrayList<FlowEntryInstall>(); } } if (add) { indices.add(flowEntries); } else { indices.remove(flowEntries); } // Update cache across cluster if (indices.isEmpty()) { this.groupFlows.remove(groupName); } else { this.groupFlows.put(groupName, indices); } } /** * Remove a flow entry that has been added previously First checks if the * entry is effectively present in the local database */ @SuppressWarnings("unused") private Status removeEntry(Node node, String flowName) { FlowEntryInstall target = null; // Find in database for (FlowEntryInstall entry : installedSwView.values()) { if (entry.equalsByNodeAndName(node, flowName)) { target = entry; break; } } // If it is not there, stop any further processing if (target == null) { return new Status(StatusCode.SUCCESS, "Entry is not present"); } // Remove from node Status status = programmer.removeFlow(target.getNode(), target.getInstall().getFlow()); // Update DB if (status.isSuccess()) { updateLocalDatabase(target, false); } else { // log the error log.warn("SDN Plugin failed to remove the flow: {}. 
The failure is: {}", target.getInstall(), status.getDescription()); } return status; } @Override public Status installFlowEntry(FlowEntry flowEntry) { Status status; if (isContainerModeAllowed(flowEntry)) { status = addEntry(flowEntry, false); } else { String msg = "Controller in container mode: Install Refused"; String logMsg = msg + ": {}"; status = new Status(StatusCode.NOTACCEPTABLE, msg); log.warn(logMsg, flowEntry); } return status; } @Override public Status installFlowEntryAsync(FlowEntry flowEntry) { Status status; if (isContainerModeAllowed(flowEntry)) { status = addEntry(flowEntry, true); } else { String msg = "Controller in container mode: Install Refused"; status = new Status(StatusCode.NOTACCEPTABLE, msg); log.warn(msg); } return status; } @Override public Status uninstallFlowEntry(FlowEntry flowEntry) { Status status; if (isContainerModeAllowed(flowEntry)) { status = removeEntry(flowEntry, false); } else { String msg = "Controller in container mode: Uninstall Refused"; String logMsg = msg + ": {}"; status = new Status(StatusCode.NOTACCEPTABLE, msg); log.warn(logMsg, flowEntry); } return status; } @Override public Status uninstallFlowEntryAsync(FlowEntry flowEntry) { Status status; if (isContainerModeAllowed(flowEntry)) { status = removeEntry(flowEntry, true); } else { String msg = "Controller in container mode: Uninstall Refused"; status = new Status(StatusCode.NOTACCEPTABLE, msg); log.warn(msg); } return status; } @Override public Status modifyFlowEntry(FlowEntry currentFlowEntry, FlowEntry newFlowEntry) { Status status = null; if (isContainerModeAllowed(currentFlowEntry)) { status = modifyEntry(currentFlowEntry, newFlowEntry, false); } else { String msg = "Controller in container mode: Modify Refused"; String logMsg = msg + ": {}"; status = new Status(StatusCode.NOTACCEPTABLE, msg); log.warn(logMsg, newFlowEntry); } return status; } @Override public Status modifyFlowEntryAsync(FlowEntry currentFlowEntry, FlowEntry newFlowEntry) { Status status = 
null; if (isContainerModeAllowed(currentFlowEntry)) { status = modifyEntry(currentFlowEntry, newFlowEntry, true); } else { String msg = "Controller in container mode: Modify Refused"; status = new Status(StatusCode.NOTACCEPTABLE, msg); log.warn(msg); } return status; } /** * Returns whether the specified flow entry is allowed to be * installed/removed/modified based on the current container mode status. * This call always returns true in the container instance of forwarding * rules manager. It is meant for the global instance only (default * container) of forwarding rules manager. Idea is that for assuring * container isolation of traffic, flow installation in default container is * blocked when in container mode (containers are present). The only flows * that are allowed in container mode in the default container are the * proactive flows, the ones automatically installed on the network node * which forwarding mode has been configured to "proactive". These flows are * needed by controller to discover the nodes topology and to discover the * attached hosts for some SDN switches. * * @param flowEntry * The flow entry to be installed/removed/modified * @return true if not in container mode or if flowEntry is internally * generated */ private boolean isContainerModeAllowed(FlowEntry flowEntry) { return (!inContainerMode) ? true : flowEntry.isInternal(); } @Override public Status modifyOrAddFlowEntry(FlowEntry newFlowEntry) { /* * Run a check on the original entries to decide whether to go with a * add or modify method. A loose check means only check against the * original flow entry requests and not against the installed flow * entries which are the result of the original entry merged with the * container flow(s) (if any). 
The modifyFlowEntry method in presence of * conflicts with the Container flows (if any) would revert back to a * delete + add pattern */ FlowEntry currentFlowEntry = originalSwView.get(newFlowEntry); if (currentFlowEntry != null) { return modifyFlowEntry(currentFlowEntry, newFlowEntry); } else { return installFlowEntry(newFlowEntry); } } @Override public Status modifyOrAddFlowEntryAsync(FlowEntry newFlowEntry) { /* * Run a check on the original entries to decide whether to go with a * add or modify method. A loose check means only check against the * original flow entry requests and not against the installed flow * entries which are the result of the original entry merged with the * container flow(s) (if any). The modifyFlowEntry method in presence of * conflicts with the Container flows (if any) would revert back to a * delete + add pattern */ FlowEntry currentFlowEntry = originalSwView.get(newFlowEntry); if (currentFlowEntry != null) { return modifyFlowEntryAsync(currentFlowEntry, newFlowEntry); } else { return installFlowEntryAsync(newFlowEntry); } } @Override public Status uninstallFlowEntryGroup(String groupName) { if (groupName == null || groupName.isEmpty()) { return new Status(StatusCode.BADREQUEST, "Invalid group name"); } if (groupName.equals(FlowConfig.INTERNALSTATICFLOWGROUP)) { return new Status(StatusCode.BADREQUEST, "Internal static flows group cannot be deleted through this api"); } if (inContainerMode) { String msg = "Controller in container mode: Group Uninstall Refused"; String logMsg = msg + ": {}"; log.warn(logMsg, groupName); return new Status(StatusCode.NOTACCEPTABLE, msg); } int toBeRemoved = 0; String error = ""; if (groupFlows.containsKey(groupName)) { List<FlowEntryInstall> list = new ArrayList<FlowEntryInstall>(groupFlows.get(groupName)); toBeRemoved = list.size(); for (FlowEntryInstall entry : list) { Status status = this.removeEntry(entry.getOriginal(), false); if (status.isSuccess()) { toBeRemoved -= 1; } else { error = 
status.getDescription();
                }
            }
        }
        return (toBeRemoved == 0) ? new Status(StatusCode.SUCCESS) : new Status(StatusCode.INTERNALERROR,
                "Not all the flows were removed: " + error);
    }

    /**
     * Asynchronously uninstalls all the flow entries belonging to the named
     * group. Unlike the synchronous variant, individual removal failures are
     * not collected or reported back to the caller.
     *
     * @param groupName the name of the flow entry group to uninstall
     * @return BADREQUEST for an invalid or internal group name, NOTACCEPTABLE
     *         when the controller is in container mode, SUCCESS otherwise
     */
    @Override
    public Status uninstallFlowEntryGroupAsync(String groupName) {
        if (groupName == null || groupName.isEmpty()) {
            return new Status(StatusCode.BADREQUEST, "Invalid group name");
        }
        if (groupName.equals(FlowConfig.INTERNALSTATICFLOWGROUP)) {
            return new Status(StatusCode.BADREQUEST, "Static flows group cannot be deleted through this api");
        }
        if (inContainerMode) {
            String msg = "Controller in container mode: Group Uninstall Refused";
            String logMsg = msg + ": {}";
            log.warn(logMsg, groupName);
            return new Status(StatusCode.NOTACCEPTABLE, msg);
        }
        if (groupFlows.containsKey(groupName)) {
            // Iterate over a copy of the group's list: the removal path
            // updates the groupFlows index while we iterate
            List<FlowEntryInstall> list = new ArrayList<FlowEntryInstall>(groupFlows.get(groupName));
            for (FlowEntryInstall entry : list) {
                this.removeEntry(entry.getOriginal(), true);
            }
        }
        return new Status(StatusCode.SUCCESS);
    }

    /**
     * Checks whether the passed flow entry conflicts with the container flows.
     *
     * @param flowEntry the flow entry to check
     * @return true if the entry conflicts with the container flows
     */
    @Override
    public boolean checkFlowEntryConflict(FlowEntry flowEntry) {
        return entryConflictsWithContainerFlows(flowEntry);
    }

    /**
     * Updates all installed flows because the container flow got updated. This
     * is obtained in two phases on a per node basis: 1) Uninstall of all flows
     * 2) Reinstall of all flows. This is needed because a new container flows
     * merged flow may conflict with an existing old container flows merged
     * flow on the network node.
     */
    private void updateFlowsContainerFlow() {
        Set<FlowEntry> toReInstall = new HashSet<FlowEntry>();
        // First remove all installed entries
        for (ConcurrentMap.Entry<FlowEntryInstall, FlowEntryInstall> entry : installedSwView.entrySet()) {
            FlowEntryInstall current = entry.getValue();
            // Store the original entry
            toReInstall.add(current.getOriginal());
            // Remove the old couples. 
No validity checks to be run, use the // internal remove this.removeEntryInternal(current, false); } // Then reinstall the original entries for (FlowEntry entry : toReInstall) { // Reinstall the original flow entries, via the regular path: new // cFlow merge + validations this.installFlowEntry(entry); } } private void nonClusterObjectCreate() { originalSwView = new ConcurrentHashMap<FlowEntry, FlowEntry>(); installedSwView = new ConcurrentHashMap<FlowEntryInstall, FlowEntryInstall>(); nodeFlows = new ConcurrentHashMap<Node, List<FlowEntryInstall>>(); groupFlows = new ConcurrentHashMap<String, List<FlowEntryInstall>>(); TSPolicies = new ConcurrentHashMap<String, Object>(); staticFlowsOrdinal = new ConcurrentHashMap<Integer, Integer>(); portGroupConfigs = new ConcurrentHashMap<String, PortGroupConfig>(); portGroupData = new ConcurrentHashMap<PortGroupConfig, Map<Node, PortGroup>>(); staticFlows = new ConcurrentHashMap<Integer, FlowConfig>(); inactiveFlows = new ConcurrentHashMap<FlowEntry, FlowEntry>(); } private void registerWithOSGIConsole() { BundleContext bundleContext = FrameworkUtil.getBundle(this.getClass()).getBundleContext(); bundleContext.registerService(CommandProvider.class.getName(), this, null); } @Override public void setTSPolicyData(String policyname, Object o, boolean add) { if (add) { /* Check if this policy already exists */ if (!(TSPolicies.containsKey(policyname))) { TSPolicies.put(policyname, o); } } else { TSPolicies.remove(policyname); } if (frmAware != null) { synchronized (frmAware) { for (IForwardingRulesManagerAware frma : frmAware) { try { frma.policyUpdate(policyname, add); } catch (Exception e) { log.warn("Exception on callback", e); } } } } } @Override public Map<String, Object> getTSPolicyData() { return TSPolicies; } @Override public Object getTSPolicyData(String policyName) { if (TSPolicies.containsKey(policyName)) { return TSPolicies.get(policyName); } else { return null; } } @Override public List<FlowEntry> 
getFlowEntriesForGroup(String policyName) { List<FlowEntry> list = new ArrayList<FlowEntry>(); if (policyName != null && !policyName.trim().isEmpty()) { for (Map.Entry<FlowEntry, FlowEntry> entry : this.originalSwView.entrySet()) { if (policyName.equals(entry.getKey().getGroupName())) { list.add(entry.getKey().clone()); } } } return list; } @Override public List<FlowEntry> getInstalledFlowEntriesForGroup(String policyName) { List<FlowEntry> list = new ArrayList<FlowEntry>(); if (policyName != null && !policyName.trim().isEmpty()) { for (Map.Entry<FlowEntryInstall, FlowEntryInstall> entry : this.installedSwView.entrySet()) { if (policyName.equals(entry.getKey().getGroupName())) { list.add(entry.getKey().getInstall().clone()); } } } return list; } @Override public void addOutputPort(Node node, String flowName, List<NodeConnector> portList) { for (FlowEntryInstall flow : this.nodeFlows.get(node)) { if (flow.getFlowName().equals(flowName)) { FlowEntry currentFlowEntry = flow.getOriginal(); FlowEntry newFlowEntry = currentFlowEntry.clone(); for (NodeConnector dstPort : portList) { newFlowEntry.getFlow().addAction(new Output(dstPort)); } Status error = modifyEntry(currentFlowEntry, newFlowEntry, false); if (error.isSuccess()) { log.info("Ports {} added to FlowEntry {}", portList, flowName); } else { log.warn("Failed to add ports {} to Flow entry {}. 
The failure is: {}", portList, currentFlowEntry.toString(), error.getDescription()); } return; } } log.warn("Failed to add ports to Flow {} on Node {}: Entry Not Found", flowName, node); } @Override public void removeOutputPort(Node node, String flowName, List<NodeConnector> portList) { for (FlowEntryInstall index : this.nodeFlows.get(node)) { FlowEntryInstall flow = this.installedSwView.get(index); if (flow.getFlowName().equals(flowName)) { FlowEntry currentFlowEntry = flow.getOriginal(); FlowEntry newFlowEntry = currentFlowEntry.clone(); for (NodeConnector dstPort : portList) { Action action = new Output(dstPort); newFlowEntry.getFlow().removeAction(action); } Status status = modifyEntry(currentFlowEntry, newFlowEntry, false); if (status.isSuccess()) { log.info("Ports {} removed from FlowEntry {}", portList, flowName); } else { log.warn("Failed to remove ports {} from Flow entry {}. The failure is: {}", portList, currentFlowEntry.toString(), status.getDescription()); } return; } } log.warn("Failed to remove ports from Flow {} on Node {}: Entry Not Found", flowName, node); } /* * This function assumes the target flow has only one output port */ @Override public void replaceOutputPort(Node node, String flowName, NodeConnector outPort) { FlowEntry currentFlowEntry = null; FlowEntry newFlowEntry = null; // Find the flow for (FlowEntryInstall index : this.nodeFlows.get(node)) { FlowEntryInstall flow = this.installedSwView.get(index); if (flow.getFlowName().equals(flowName)) { currentFlowEntry = flow.getOriginal(); break; } } if (currentFlowEntry == null) { log.warn("Failed to replace output port for flow {} on node {}: Entry Not Found", flowName, node); return; } // Create a flow copy with the new output port newFlowEntry = currentFlowEntry.clone(); Action target = null; for (Action action : newFlowEntry.getFlow().getActions()) { if (action.getType() == ActionType.OUTPUT) { target = action; break; } } newFlowEntry.getFlow().removeAction(target); 
newFlowEntry.getFlow().addAction(new Output(outPort)); // Modify on network node Status status = modifyEntry(currentFlowEntry, newFlowEntry, false); if (status.isSuccess()) { log.info("Output port replaced with {} for flow {} on node {}", outPort, flowName, node); } else { log.warn("Failed to replace output port for flow {} on node {}. The failure is: {}", flowName, node, status.getDescription()); } return; } @Override public NodeConnector getOutputPort(Node node, String flowName) { for (FlowEntryInstall index : this.nodeFlows.get(node)) { FlowEntryInstall flow = this.installedSwView.get(index); if (flow.getFlowName().equals(flowName)) { for (Action action : flow.getOriginal().getFlow().getActions()) { if (action.getType() == ActionType.OUTPUT) { return ((Output) action).getPort(); } } } } return null; } private void cacheStartup() { allocateCaches(); retrieveCaches(); } @SuppressWarnings("deprecation") private void allocateCaches() { if (this.clusterContainerService == null) { log.warn("Un-initialized clusterContainerService, can't create cache"); return; } log.debug("Allocating caches for Container {}", container.getName()); try { clusterContainerService.createCache("frm.originalSwView", EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache("frm.installedSwView", EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache("frm.inactiveFlows", EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache("frm.nodeFlows", EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache("frm.groupFlows", EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache("frm.staticFlows", EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache("frm.flowsSaveEvent", EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache("frm.staticFlowsOrdinal", 
EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache("frm.portGroupConfigs", EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache("frm.portGroupData", EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache("frm.TSPolicies", EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache(WORKSTATUSCACHE, EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); clusterContainerService.createCache(WORKORDERCACHE, EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL)); } catch (CacheConfigException cce) { log.error("CacheConfigException"); } catch (CacheExistException cce) { log.error("CacheExistException"); } } @SuppressWarnings({ "unchecked", "deprecation" }) private void retrieveCaches() { ConcurrentMap<?, ?> map; if (this.clusterContainerService == null) { log.warn("un-initialized clusterContainerService, can't retrieve cache"); nonClusterObjectCreate(); return; } log.debug("Retrieving Caches for Container {}", container.getName()); map = clusterContainerService.getCache("frm.originalSwView"); if (map != null) { originalSwView = (ConcurrentMap<FlowEntry, FlowEntry>) map; } else { log.error("Retrieval of frm.originalSwView cache failed for Container {}", container.getName()); } map = clusterContainerService.getCache("frm.installedSwView"); if (map != null) { installedSwView = (ConcurrentMap<FlowEntryInstall, FlowEntryInstall>) map; } else { log.error("Retrieval of frm.installedSwView cache failed for Container {}", container.getName()); } map = clusterContainerService.getCache("frm.inactiveFlows"); if (map != null) { inactiveFlows = (ConcurrentMap<FlowEntry, FlowEntry>) map; } else { log.error("Retrieval of frm.inactiveFlows cache failed for Container {}", container.getName()); } map = clusterContainerService.getCache("frm.nodeFlows"); if (map != null) { nodeFlows = (ConcurrentMap<Node, List<FlowEntryInstall>>) map; } else { 
log.error("Retrieval of cache failed for Container {}", container.getName()); } map = clusterContainerService.getCache("frm.groupFlows"); if (map != null) { groupFlows = (ConcurrentMap<String, List<FlowEntryInstall>>) map; } else { log.error("Retrieval of frm.groupFlows cache failed for Container {}", container.getName()); } map = clusterContainerService.getCache("frm.staticFlows"); if (map != null) { staticFlows = (ConcurrentMap<Integer, FlowConfig>) map; } else { log.error("Retrieval of frm.staticFlows cache failed for Container {}", container.getName()); } map = clusterContainerService.getCache("frm.staticFlowsOrdinal"); if (map != null) { staticFlowsOrdinal = (ConcurrentMap<Integer, Integer>) map; } else { log.error("Retrieval of frm.staticFlowsOrdinal cache failed for Container {}", container.getName()); } map = clusterContainerService.getCache("frm.portGroupConfigs"); if (map != null) { portGroupConfigs = (ConcurrentMap<String, PortGroupConfig>) map; } else { log.error("Retrieval of frm.portGroupConfigs cache failed for Container {}", container.getName()); } map = clusterContainerService.getCache("frm.portGroupData"); if (map != null) { portGroupData = (ConcurrentMap<PortGroupConfig, Map<Node, PortGroup>>) map; } else { log.error("Retrieval of frm.portGroupData allocation failed for Container {}", container.getName()); } map = clusterContainerService.getCache("frm.TSPolicies"); if (map != null) { TSPolicies = (ConcurrentMap<String, Object>) map; } else { log.error("Retrieval of frm.TSPolicies cache failed for Container {}", container.getName()); } map = clusterContainerService.getCache(WORKORDERCACHE); if (map != null) { workOrder = (ConcurrentMap<FlowEntryDistributionOrder, FlowEntryInstall>) map; } else { log.error("Retrieval of " + WORKORDERCACHE + " cache failed for Container {}", container.getName()); } map = clusterContainerService.getCache(WORKSTATUSCACHE); if (map != null) { workStatus = (ConcurrentMap<FlowEntryDistributionOrder, Status>) map; } else 
{ log.error("Retrieval of " + WORKSTATUSCACHE + " cache failed for Container {}", container.getName()); } } private boolean flowConfigExists(FlowConfig config) { // Flow name has to be unique on per node id basis for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { if (entry.getValue().isByNameAndNodeIdEqual(config)) { return true; } } return false; } @Override public Status addStaticFlow(FlowConfig config) { // Configuration object validation Status status = config.validate(container); if (!status.isSuccess()) { log.warn("Invalid Configuration for flow {}. The failure is {}", config, status.getDescription()); String error = "Invalid Configuration (" + status.getDescription() + ")"; config.setStatus(error); return new Status(StatusCode.BADREQUEST, error); } return addStaticFlowInternal(config, false); } /** * Private method to add a static flow configuration which does not run any * validation on the passed FlowConfig object. If restore is set to true, * configuration is stored in configuration database regardless the * installation on the network node was successful. This is useful at boot * when static flows are present in startup configuration and are read * before the switches connects. 
* * @param config * The static flow configuration * @param restore * if true, the configuration is stored regardless the * installation on the network node was successful * @return The status of this request */ private Status addStaticFlowInternal(FlowConfig config, boolean restore) { boolean multipleFlowPush = false; String error; Status status; config.setStatus(SUCCESS); // Presence check if (flowConfigExists(config)) { error = "Entry with this name on specified switch already exists"; log.warn("Entry with this name on specified switch already exists: {}", config); config.setStatus(error); return new Status(StatusCode.CONFLICT, error); } if ((config.getIngressPort() == null) && config.getPortGroup() != null) { for (String portGroupName : portGroupConfigs.keySet()) { if (portGroupName.equalsIgnoreCase(config.getPortGroup())) { multipleFlowPush = true; break; } } if (!multipleFlowPush) { log.warn("Invalid Configuration(Invalid PortGroup Name) for flow {}", config); error = "Invalid Configuration (Invalid PortGroup Name)"; config.setStatus(error); return new Status(StatusCode.BADREQUEST, error); } } /* * If requested program the entry in hardware first before updating the * StaticFlow DB */ if (!multipleFlowPush) { // Program hw if (config.installInHw()) { FlowEntry entry = config.getFlowEntry(); status = this.installFlowEntry(entry); if (!status.isSuccess()) { config.setStatus(status.getDescription()); if (!restore) { return status; } } } } /* * When the control reaches this point, either of the following * conditions is true 1. This is a single entry configuration (non * PortGroup) and the hardware installation is successful 2. This is a * multiple entry configuration (PortGroup) and hardware installation is * NOT done directly on this event. 3. The User prefers to retain the * configuration in Controller and skip hardware installation. * * Hence it is safe to update the StaticFlow DB at this point. 
* * Note : For the case of PortGrouping, it is essential to have this DB * populated before the PortGroupListeners can query for the DB * triggered using portGroupChanged event... */ Integer ordinal = staticFlowsOrdinal.get(0); staticFlowsOrdinal.put(0, ++ordinal); staticFlows.put(ordinal, config); if (multipleFlowPush) { PortGroupConfig pgconfig = portGroupConfigs.get(config.getPortGroup()); Map<Node, PortGroup> existingData = portGroupData.get(pgconfig); if (existingData != null) { portGroupChanged(pgconfig, existingData, true); } } return new Status(StatusCode.SUCCESS); } private void addStaticFlowsToSwitch(Node node) { for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { FlowConfig config = entry.getValue(); if (config.isPortGroupEnabled()) { continue; } if (config.getNode().equals(node)) { if (config.installInHw() && !config.getStatus().equals(SUCCESS)) { Status status = this.installFlowEntryAsync(config.getFlowEntry()); config.setStatus(status.getDescription()); } } } // Update cluster cache refreshClusterStaticFlowsStatus(node); } private void updateStaticFlowConfigsOnNodeDown(Node node) { log.trace("Updating Static Flow configs on node down: {}", node); List<Integer> toRemove = new ArrayList<Integer>(); for (Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { FlowConfig config = entry.getValue(); if (config.isPortGroupEnabled()) { continue; } if (config.installInHw() && config.getNode().equals(node)) { if (config.isInternalFlow()) { // Take note of this controller generated static flow toRemove.add(entry.getKey()); } else { config.setStatus(NODEDOWN); } } } // Remove controller generated static flows for this node for (Integer index : toRemove) { staticFlows.remove(index); } // Update cluster cache refreshClusterStaticFlowsStatus(node); } private void updateStaticFlowConfigsOnContainerModeChange(UpdateType update) { log.trace("Updating Static Flow configs on container mode change: {}", update); for 
(ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { FlowConfig config = entry.getValue(); if (config.isPortGroupEnabled()) { continue; } if (config.installInHw() && !config.isInternalFlow()) { switch (update) { case ADDED: config.setStatus("Removed from node because in container mode"); break; case REMOVED: config.setStatus(SUCCESS); break; default: } } } // Update cluster cache refreshClusterStaticFlowsStatus(null); } @Override public Status removeStaticFlow(FlowConfig config) { /* * No config.isInternal() check as NB does not take this path and GUI * cannot issue a delete on an internal generated flow. We need this * path to be accessible when switch mode is changed from proactive to * reactive, so that we can remove the internal generated LLDP and ARP * punt flows */ // Look for the target configuration entry Integer key = 0; FlowConfig target = null; for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { if (entry.getValue().isByNameAndNodeIdEqual(config)) { key = entry.getKey(); target = entry.getValue(); break; } } if (target == null) { return new Status(StatusCode.NOTFOUND, "Entry Not Present"); } // Program the network node Status status = this.uninstallFlowEntry(config.getFlowEntry()); // Update configuration database if programming was successful if (status.isSuccess()) { staticFlows.remove(key); } return status; } @Override public Status removeStaticFlow(String name, Node node) { // Look for the target configuration entry Integer key = 0; FlowConfig target = null; for (ConcurrentMap.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) { if (mapEntry.getValue().isByNameAndNodeIdEqual(name, node)) { key = mapEntry.getKey(); target = mapEntry.getValue(); break; } } if (target == null) { return new Status(StatusCode.NOTFOUND, "Entry Not Present"); } // Validity check for api3 entry point if (target.isInternalFlow()) { String msg = "Invalid operation: Controller generated flow cannot be deleted"; 
String logMsg = msg + ": {}"; log.warn(logMsg, name); return new Status(StatusCode.NOTACCEPTABLE, msg); } if (target.isPortGroupEnabled()) { String msg = "Invalid operation: Port Group flows cannot be deleted through this API"; String logMsg = msg + ": {}"; log.warn(logMsg, name); return new Status(StatusCode.NOTACCEPTABLE, msg); } // Program the network node Status status = this.removeEntry(target.getFlowEntry(), false); // Update configuration database if programming was successful if (status.isSuccess()) { staticFlows.remove(key); } return status; } @Override public Status modifyStaticFlow(FlowConfig newFlowConfig) { // Validity check for api3 entry point if (newFlowConfig.isInternalFlow()) { String msg = "Invalid operation: Controller generated flow cannot be modified"; String logMsg = msg + ": {}"; log.warn(logMsg, newFlowConfig); return new Status(StatusCode.NOTACCEPTABLE, msg); } // Validity Check Status status = newFlowConfig.validate(container); if (!status.isSuccess()) { String msg = "Invalid Configuration (" + status.getDescription() + ")"; newFlowConfig.setStatus(msg); log.warn("Invalid Configuration for flow {}. The failure is {}", newFlowConfig, status.getDescription()); return new Status(StatusCode.BADREQUEST, msg); } FlowConfig oldFlowConfig = null; Integer index = null; for (ConcurrentMap.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) { FlowConfig entry = mapEntry.getValue(); if (entry.isByNameAndNodeIdEqual(newFlowConfig.getName(), newFlowConfig.getNode())) { oldFlowConfig = entry; index = mapEntry.getKey(); break; } } if (oldFlowConfig == null) { String msg = "Attempt to modify a non existing static flow"; String logMsg = msg + ": {}"; log.warn(logMsg, newFlowConfig); return new Status(StatusCode.NOTFOUND, msg); } // Do not attempt to reinstall the flow, warn user if (newFlowConfig.equals(oldFlowConfig)) { String msg = "No modification detected"; log.info("Static flow modification skipped. 
New flow and old flow are the same: {}", newFlowConfig); return new Status(StatusCode.SUCCESS, msg); } // If flow is installed, program the network node status = new Status(StatusCode.SUCCESS, "Saved in config"); if (oldFlowConfig.installInHw()) { status = this.modifyFlowEntry(oldFlowConfig.getFlowEntry(), newFlowConfig.getFlowEntry()); } // Update configuration database if programming was successful if (status.isSuccess()) { newFlowConfig.setStatus(status.getDescription()); staticFlows.put(index, newFlowConfig); } return status; } @Override public Status toggleStaticFlowStatus(String name, Node node) { return toggleStaticFlowStatus(getStaticFlow(name, node)); } @Override public Status toggleStaticFlowStatus(FlowConfig config) { if (config == null) { String msg = "Invalid request: null flow config"; log.warn(msg); return new Status(StatusCode.BADREQUEST, msg); } // Validity check for api3 entry point if (config.isInternalFlow()) { String msg = "Invalid operation: Controller generated flow cannot be modified"; String logMsg = msg + ": {}"; log.warn(logMsg, config); return new Status(StatusCode.NOTACCEPTABLE, msg); } // Find the config entry Integer key = 0; FlowConfig target = null; for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { FlowConfig conf = entry.getValue(); if (conf.isByNameAndNodeIdEqual(config)) { key = entry.getKey(); target = conf; break; } } if (target != null) { // Program the network node Status status = (target.installInHw()) ? this.uninstallFlowEntry(target.getFlowEntry()) : this .installFlowEntry(target.getFlowEntry()); if (status.isSuccess()) { // Update Configuration database target.setStatus(SUCCESS); target.toggleInstallation(); staticFlows.put(key, target); } return status; } return new Status(StatusCode.NOTFOUND, "Unable to locate the entry. Failed to toggle status"); } /** * Reinsert all static flows entries in the cache to force cache updates in * the cluster. 
This is useful when only some parameters were changed in the * entries, like the status. * * @param node * The node for which the static flow configurations have to be * refreshed. If null, all nodes static flows will be refreshed. */ private void refreshClusterStaticFlowsStatus(Node node) { // Refresh cluster cache for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { if (node == null || entry.getValue().getNode().equals(node)) { staticFlows.put(entry.getKey(), entry.getValue()); } } } /** * Uninstall all the non-internal Flow Entries present in the software view. * If requested, a copy of each original flow entry will be stored in the * inactive list so that it can be re-applied when needed (This is typically * the case when running in the default container and controller moved to * container mode) * * @param preserveFlowEntries * if true, a copy of each original entry is stored in the * inactive list */ private void uninstallAllFlowEntries(boolean preserveFlowEntries) { log.info("Uninstalling all non-internal flows"); List<FlowEntryInstall> toRemove = new ArrayList<FlowEntryInstall>(); // Store entries / create target list for (ConcurrentMap.Entry<FlowEntryInstall, FlowEntryInstall> mapEntry : installedSwView.entrySet()) { FlowEntryInstall flowEntries = mapEntry.getValue(); // Skip internal generated static flows if (!flowEntries.isInternal()) { toRemove.add(flowEntries); // Store the original entries if requested if (preserveFlowEntries) { inactiveFlows.put(flowEntries.getOriginal(), flowEntries.getOriginal()); } } } // Now remove the entries for (FlowEntryInstall flowEntryHw : toRemove) { Status status = this.removeEntryInternal(flowEntryHw, false); if (!status.isSuccess()) { log.warn("Failed to remove entry: {}. 
The failure is: {}", flowEntryHw, status.getDescription()); } } } /** * Re-install all the Flow Entries present in the inactive list The inactive * list will be empty at the end of this call This function is called on the * default container instance of FRM only when the last container is deleted */ private void reinstallAllFlowEntries() { log.info("Reinstalling all inactive flows"); for (FlowEntry flowEntry : this.inactiveFlows.keySet()) { this.addEntry(flowEntry, false); } // Empty inactive list in any case inactiveFlows.clear(); } @Override public List<FlowConfig> getStaticFlows() { return getStaticFlowsOrderedList(staticFlows, staticFlowsOrdinal.get(0).intValue()); } // TODO: need to come out with a better algorithm for maintaining the order // of the configuration entries // with actual one, index associated to deleted entries cannot be reused and // map grows... private List<FlowConfig> getStaticFlowsOrderedList(ConcurrentMap<Integer, FlowConfig> flowMap, int maxKey) { List<FlowConfig> orderedList = new ArrayList<FlowConfig>(); for (int i = 0; i <= maxKey; i++) { FlowConfig entry = flowMap.get(i); if (entry != null) { orderedList.add(entry); } } return orderedList; } @Override public FlowConfig getStaticFlow(String name, Node node) { for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { if (entry.getValue().isByNameAndNodeIdEqual(name, node)) { return entry.getValue(); } } return null; } @Override public List<FlowConfig> getStaticFlows(Node node) { List<FlowConfig> list = new ArrayList<FlowConfig>(); for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { if (entry.getValue().onNode(node)) { list.add(entry.getValue()); } } return list; } @Override public List<String> getStaticFlowNamesForNode(Node node) { List<String> list = new ArrayList<String>(); for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { if (entry.getValue().onNode(node)) { list.add(entry.getValue().getName()); } } 
return list;
    }

    /**
     * Returns the distinct set of nodes that have at least one static flow
     * configured.
     *
     * @return list of nodes referenced by the static flow configurations
     */
    @Override
    public List<Node> getListNodeWithConfiguredFlows() {
        Set<Node> set = new HashSet<Node>();
        for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            set.add(entry.getValue().getNode());
        }
        return new ArrayList<Node>(set);
    }

    /**
     * Loads the static flow and port group configurations from file and
     * re-applies them in restore mode (configuration is stored even when the
     * network node programming fails).
     */
    @SuppressWarnings("unchecked")
    private void loadFlowConfiguration() {
        ObjectReader objReader = new ObjectReader();
        ConcurrentMap<Integer, FlowConfig> confList = (ConcurrentMap<Integer, FlowConfig>) objReader.read(this,
                frmFileName);

        ConcurrentMap<String, PortGroupConfig> pgConfig = (ConcurrentMap<String, PortGroupConfig>) objReader.read(this,
                portGroupFileName);

        if (pgConfig != null) {
            for (ConcurrentMap.Entry<String, PortGroupConfig> entry : pgConfig.entrySet()) {
                addPortGroupConfig(entry.getKey(), entry.getValue().getMatchString(), true);
            }
        }

        if (confList == null) {
            return;
        }

        // Find the highest ordinal so the ordered list covers every entry
        int maxKey = 0;
        for (Integer key : confList.keySet()) {
            if (key.intValue() > maxKey) {
                maxKey = key.intValue();
            }
        }

        for (FlowConfig conf : getStaticFlowsOrderedList(confList, maxKey)) {
            addStaticFlowInternal(conf, true);
        }
    }

    @Override
    public Object readObject(ObjectInputStream ois) throws FileNotFoundException, IOException, ClassNotFoundException {
        return ois.readObject();
    }

    @Override
    public Status saveConfig() {
        return saveConfigInternal();
    }

    /**
     * Persists the user-configured static flows and the port group
     * configurations to file. Dynamic and controller-generated internal flows
     * are not saved.
     *
     * @return the status of the save operation
     */
    private Status saveConfigInternal() {
        ObjectWriter objWriter = new ObjectWriter();
        ConcurrentMap<Integer, FlowConfig> nonDynamicFlows = new ConcurrentHashMap<Integer, FlowConfig>();

        // Iterate the entry set rather than keySet()+get(): on a concurrent
        // map the latter can return null (and NPE below) if an entry is
        // removed between the two calls
        for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            FlowConfig config = entry.getValue();
            // Do not save dynamic and controller generated static flows
            if (config.isDynamic() || config.isInternalFlow()) {
                continue;
            }
            nonDynamicFlows.put(entry.getKey(), config);
        }

        objWriter.write(nonDynamicFlows, frmFileName);
        objWriter.write(new ConcurrentHashMap<String, PortGroupConfig>(portGroupConfigs), portGroupFileName);

        return new Status(StatusCode.SUCCESS, null);
    }

    @Override
    public void subnetNotify(Subnet sub, boolean add)
{ } private void installImplicitARPReplyPunt(Node node) { if (node == null) { return; } List<String> puntAction = new ArrayList<String>(); puntAction.add(ActionType.CONTROLLER.toString()); FlowConfig allowARP = new FlowConfig(); allowARP.setInstallInHw(true); allowARP.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Punt ARP Reply" + FlowConfig.INTERNALSTATICFLOWEND); allowARP.setPriority("500"); allowARP.setNode(node); allowARP.setEtherType("0x" + Integer.toHexString(EtherTypes.ARP.intValue()).toUpperCase()); allowARP.setDstMac(HexEncode.bytesToHexString(switchManager.getControllerMAC())); allowARP.setActions(puntAction); addStaticFlowInternal(allowARP, true); // skip validation on internal static flow name } @Override public void modeChangeNotify(Node node, boolean proactive) { List<FlowConfig> defaultConfigs = new ArrayList<FlowConfig>(); List<String> puntAction = new ArrayList<String>(); puntAction.add(ActionType.CONTROLLER.toString()); FlowConfig allowARP = new FlowConfig(); allowARP.setInstallInHw(true); allowARP.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Punt ARP" + FlowConfig.INTERNALSTATICFLOWEND); allowARP.setPriority("1"); allowARP.setNode(node); allowARP.setEtherType("0x" + Integer.toHexString(EtherTypes.ARP.intValue()).toUpperCase()); allowARP.setActions(puntAction); defaultConfigs.add(allowARP); FlowConfig allowLLDP = new FlowConfig(); allowLLDP.setInstallInHw(true); allowLLDP.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Punt LLDP" + FlowConfig.INTERNALSTATICFLOWEND); allowLLDP.setPriority("1"); allowLLDP.setNode(node); allowLLDP.setEtherType("0x" + Integer.toHexString(EtherTypes.LLDP.intValue()).toUpperCase()); allowLLDP.setActions(puntAction); defaultConfigs.add(allowLLDP); List<String> dropAction = new ArrayList<String>(); dropAction.add(ActionType.DROP.toString()); FlowConfig dropAllConfig = new FlowConfig(); dropAllConfig.setInstallInHw(true); dropAllConfig.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Catch-All Drop" + 
FlowConfig.INTERNALSTATICFLOWEND); dropAllConfig.setPriority("0"); dropAllConfig.setNode(node); dropAllConfig.setActions(dropAction); defaultConfigs.add(dropAllConfig); log.info("Forwarding mode for node {} set to {}", node, (proactive ? "proactive" : "reactive")); for (FlowConfig fc : defaultConfigs) { Status status = (proactive) ? addStaticFlowInternal(fc, false) : removeStaticFlow(fc); if (status.isSuccess()) { log.info("{} Proactive Static flow: {}", (proactive ? "Installed" : "Removed"), fc.getName()); } else { log.warn("Failed to {} Proactive Static flow: {}", (proactive ? "install" : "remove"), fc.getName()); } } } /** * Remove from the databases all the flows installed on the node * * @param node */ private void cleanDatabaseForNode(Node node) { log.info("Cleaning Flow database for Node {}", node); if (nodeFlows.containsKey(node)) { List<FlowEntryInstall> toRemove = new ArrayList<FlowEntryInstall>(nodeFlows.get(node)); for (FlowEntryInstall entry : toRemove) { updateLocalDatabase(entry, false); } } } private boolean doesFlowContainNodeConnector(Flow flow, NodeConnector nc) { if (nc == null) { return false; } Match match = flow.getMatch(); if (match.isPresent(MatchType.IN_PORT)) { NodeConnector matchPort = (NodeConnector) match.getField(MatchType.IN_PORT).getValue(); if (matchPort.equals(nc)) { return true; } } List<Action> actionsList = flow.getActions(); if (actionsList != null) { for (Action action : actionsList) { if (action instanceof Output) { NodeConnector actionPort = ((Output) action).getPort(); if (actionPort.equals(nc)) { return true; } } } } return false; } @Override public void notifyNode(Node node, UpdateType type, Map<String, Property> propMap) { this.pendingEvents.offer(new NodeUpdateEvent(type, node)); } @Override public void notifyNodeConnector(NodeConnector nodeConnector, UpdateType type, Map<String, Property> propMap) { } private FlowConfig getDerivedFlowConfig(FlowConfig original, String configName, Short port) { FlowConfig derivedFlow = 
new FlowConfig(original); derivedFlow.setDynamic(true); derivedFlow.setPortGroup(null); derivedFlow.setName(original.getName() + "_" + configName + "_" + port); derivedFlow.setIngressPort(port + ""); return derivedFlow; } private void addPortGroupFlows(PortGroupConfig config, Node node, PortGroup data) { for (FlowConfig staticFlow : staticFlows.values()) { if (staticFlow.getPortGroup() == null) { continue; } if ((staticFlow.getNode().equals(node)) && (staticFlow.getPortGroup().equals(config.getName()))) { for (Short port : data.getPorts()) { FlowConfig derivedFlow = getDerivedFlowConfig(staticFlow, config.getName(), port); addStaticFlowInternal(derivedFlow, false); } } } } private void removePortGroupFlows(PortGroupConfig config, Node node, PortGroup data) { for (FlowConfig staticFlow : staticFlows.values()) { if (staticFlow.getPortGroup() == null) { continue; } if (staticFlow.getNode().equals(node) && staticFlow.getPortGroup().equals(config.getName())) { for (Short port : data.getPorts()) { FlowConfig derivedFlow = getDerivedFlowConfig(staticFlow, config.getName(), port); removeStaticFlow(derivedFlow); } } } } @Override public void portGroupChanged(PortGroupConfig config, Map<Node, PortGroup> data, boolean add) { log.info("PortGroup Changed for: {} Data: {}", config, portGroupData); Map<Node, PortGroup> existingData = portGroupData.get(config); if (existingData != null) { for (Map.Entry<Node, PortGroup> entry : data.entrySet()) { PortGroup existingPortGroup = existingData.get(entry.getKey()); if (existingPortGroup == null) { if (add) { existingData.put(entry.getKey(), entry.getValue()); addPortGroupFlows(config, entry.getKey(), entry.getValue()); } } else { if (add) { existingPortGroup.getPorts().addAll(entry.getValue().getPorts()); addPortGroupFlows(config, entry.getKey(), entry.getValue()); } else { existingPortGroup.getPorts().removeAll(entry.getValue().getPorts()); removePortGroupFlows(config, entry.getKey(), entry.getValue()); } } } } else { if (add) { 
portGroupData.put(config, data); for (Node swid : data.keySet()) { addPortGroupFlows(config, swid, data.get(swid)); } } } } @Override public boolean addPortGroupConfig(String name, String regex, boolean restore) { PortGroupConfig config = portGroupConfigs.get(name); if (config != null) { return false; } if ((portGroupProvider == null) && !restore) { return false; } if ((portGroupProvider != null) && (!portGroupProvider.isMatchCriteriaSupported(regex))) { return false; } config = new PortGroupConfig(name, regex); portGroupConfigs.put(name, config); if (portGroupProvider != null) { portGroupProvider.createPortGroupConfig(config); } return true; } @Override public boolean delPortGroupConfig(String name) { PortGroupConfig config = portGroupConfigs.get(name); if (config == null) { return false; } if (portGroupProvider != null) { portGroupProvider.deletePortGroupConfig(config); } portGroupConfigs.remove(name); return true; } private void usePortGroupConfig(String name) { PortGroupConfig config = portGroupConfigs.get(name); if (config == null) { return; } if (portGroupProvider != null) { Map<Node, PortGroup> data = portGroupProvider.getPortGroupData(config); portGroupData.put(config, data); } } @Override public Map<String, PortGroupConfig> getPortGroupConfigs() { return portGroupConfigs; } public boolean isPortGroupSupported() { if (portGroupProvider == null) { return false; } return true; } public void setIContainer(IContainer s) { this.container = s; } public void unsetIContainer(IContainer s) { if (this.container == s) { this.container = null; } } @Override public PortGroupProvider getPortGroupProvider() { return portGroupProvider; } public void unsetPortGroupProvider(PortGroupProvider portGroupProvider) { this.portGroupProvider = null; } public void setPortGroupProvider(PortGroupProvider portGroupProvider) { this.portGroupProvider = portGroupProvider; portGroupProvider.registerPortGroupChange(this); for (PortGroupConfig config : portGroupConfigs.values()) { 
portGroupProvider.createPortGroupConfig(config); } } public void setFrmAware(IForwardingRulesManagerAware obj) { this.frmAware.add(obj); } public void unsetFrmAware(IForwardingRulesManagerAware obj) { this.frmAware.remove(obj); } void setClusterContainerService(IClusterContainerServices s) { log.debug("Cluster Service set"); this.clusterContainerService = s; } void unsetClusterContainerService(IClusterContainerServices s) { if (this.clusterContainerService == s) { log.debug("Cluster Service removed!"); this.clusterContainerService = null; } } private String getContainerName() { if (container == null) { return GlobalConstants.DEFAULT.toString(); } return container.getName(); } /** * Function called by the dependency manager when all the required * dependencies are satisfied * */ void init() { frmFileName = GlobalConstants.STARTUPHOME.toString() + "frm_staticflows_" + this.getContainerName() + ".conf"; portGroupFileName = GlobalConstants.STARTUPHOME.toString() + "portgroup_" + this.getContainerName() + ".conf"; inContainerMode = false; if (portGroupProvider != null) { portGroupProvider.registerPortGroupChange(this); } cacheStartup(); registerWithOSGIConsole(); /* * If we are not the first cluster node to come up, do not initialize * the static flow entries ordinal */ if (staticFlowsOrdinal.size() == 0) { staticFlowsOrdinal.put(0, Integer.valueOf(0)); } pendingEvents = new LinkedBlockingQueue<FRMEvent>(); // Initialize the event handler thread frmEventHandler = new Thread(new Runnable() { @Override public void run() { while (!stopping) { try { FRMEvent event = pendingEvents.take(); if (event == null) { log.warn("Dequeued null event"); continue; } if (event instanceof NodeUpdateEvent) { NodeUpdateEvent update = (NodeUpdateEvent) event; Node node = update.getNode(); switch (update.getUpdateType()) { case ADDED: addStaticFlowsToSwitch(node); break; case REMOVED: cleanDatabaseForNode(node); updateStaticFlowConfigsOnNodeDown(node); break; default: } } else if (event 
instanceof ErrorReportedEvent) { ErrorReportedEvent errEvent = (ErrorReportedEvent) event; processErrorEvent(errEvent); } else if (event instanceof WorkOrderEvent) { /* * Take care of handling the remote Work request */ WorkOrderEvent work = (WorkOrderEvent) event; FlowEntryDistributionOrder fe = work.getFe(); if (fe != null) { logsync.trace("Executing the workOrder {}", fe); Status gotStatus = null; FlowEntryInstall feiCurrent = fe.getEntry(); FlowEntryInstall feiNew = workOrder.get(fe.getEntry()); switch (fe.getUpType()) { case ADDED: /* * TODO: Not still sure how to handle the * sync entries */ gotStatus = addEntriesInternal(feiCurrent, true); break; case CHANGED: gotStatus = modifyEntryInternal(feiCurrent, feiNew, true); break; case REMOVED: gotStatus = removeEntryInternal(feiCurrent, true); break; } // Remove the Order workOrder.remove(fe); logsync.trace( "The workOrder has been executed and now the status is being returned {}", fe); // Place the status workStatus.put(fe, gotStatus); } else { log.warn("Not expected null WorkOrder", work); } } else if (event instanceof WorkStatusCleanup) { /* * Take care of handling the remote Work request */ WorkStatusCleanup work = (WorkStatusCleanup) event; FlowEntryDistributionOrder fe = work.getFe(); if (fe != null) { logsync.trace("The workStatus {} is being removed", fe); workStatus.remove(fe); } else { log.warn("Not expected null WorkStatus", work); } } else { log.warn("Dequeued unknown event {}", event.getClass() .getSimpleName()); } } catch (InterruptedException e) { // clear pending events pendingEvents.clear(); } } } }, "FRM EventHandler Collector"); } /** * Function called by the dependency manager when at least one dependency * become unsatisfied or when the component is shutting down because for * example bundle is being stopped. 
* */ void destroy() { // Interrupt the thread frmEventHandler.interrupt(); // Clear the pendingEvents queue pendingEvents.clear(); frmAware.clear(); workMonitor.clear(); } /** * Function called by dependency manager after "init ()" is called and after * the services provided by the class are registered in the service registry * */ void start() { // Initialize graceful stop flag stopping = false; // Allocate the executor service this.executor = Executors.newSingleThreadExecutor(); // Start event handler thread frmEventHandler.start(); /* * Read startup and build database if we have not already gotten the * configurations synced from another node */ if (staticFlows.isEmpty()) { loadFlowConfiguration(); } } /** * Function called by the dependency manager before the services exported by * the component are unregistered, this will be followed by a "destroy ()" * calls */ void stop() { stopping = true; uninstallAllFlowEntries(false); // Shutdown executor this.executor.shutdownNow(); } public void setFlowProgrammerService(IFlowProgrammerService service) { this.programmer = service; } public void unsetFlowProgrammerService(IFlowProgrammerService service) { if (this.programmer == service) { this.programmer = null; } } public void setSwitchManager(ISwitchManager switchManager) { this.switchManager = switchManager; } public void unsetSwitchManager(ISwitchManager switchManager) { if (this.switchManager == switchManager) { this.switchManager = null; } } @Override public void tagUpdated(String containerName, Node n, short oldTag, short newTag, UpdateType t) { if (!container.getName().equals(containerName)) { return; } } @Override public void containerFlowUpdated(String containerName, ContainerFlow previous, ContainerFlow current, UpdateType t) { if (!container.getName().equals(containerName)) { return; } log.trace("Container {}: Updating installed flows because of container flow change: {} {}", container.getName(), t, current); /* * Whether it is an addition or removal, we have 
to recompute the merged * flows entries taking into account all the current container flows * because flow merging is not an injective function */ updateFlowsContainerFlow(); } @Override public void nodeConnectorUpdated(String containerName, NodeConnector nc, UpdateType t) { if (!container.getName().equals(containerName)) { return; } boolean updateStaticFlowCluster = false; switch (t) { case REMOVED: List<FlowEntryInstall> nodeFlowEntries = nodeFlows.get(nc.getNode()); if (nodeFlowEntries == null) { return; } for (FlowEntryInstall fei : new ArrayList<FlowEntryInstall>(nodeFlowEntries)) { if (doesFlowContainNodeConnector(fei.getInstall().getFlow(), nc)) { Status status = this.removeEntryInternal(fei, true); if (!status.isSuccess()) { continue; } /* * If the flow entry is a static flow, then update its * configuration */ if (fei.getGroupName().equals(FlowConfig.STATICFLOWGROUP)) { FlowConfig flowConfig = getStaticFlow(fei.getFlowName(), fei.getNode()); if (flowConfig != null) { flowConfig.setStatus(PORTREMOVED); updateStaticFlowCluster = true; } } } } if (updateStaticFlowCluster) { refreshClusterStaticFlowsStatus(nc.getNode()); } break; case ADDED: List<FlowConfig> flowConfigForNode = getStaticFlows(nc.getNode()); for (FlowConfig flowConfig : flowConfigForNode) { if (doesFlowContainNodeConnector(flowConfig.getFlow(), nc)) { if (flowConfig.installInHw()) { Status status = this.installFlowEntry(flowConfig.getFlowEntry()); if (!status.isSuccess()) { flowConfig.setStatus(status.getDescription()); } else { flowConfig.setStatus(SUCCESS); } updateStaticFlowCluster = true; } } } if (updateStaticFlowCluster) { refreshClusterStaticFlowsStatus(nc.getNode()); } break; case CHANGED: break; default: } } @Override public void containerModeUpdated(UpdateType update) { // Only default container instance reacts on this event if (!container.getName().equals(GlobalConstants.DEFAULT.toString())) { return; } switch (update) { case ADDED: /* * Controller is moving to container mode. 
We are in the default * container context, we need to remove all our non-internal flows * to prevent any container isolation breakage. We also need to * preserve our flow so that they can be re-installed if we move * back to non container mode (no containers). */ this.inContainerMode = true; this.uninstallAllFlowEntries(true); break; case REMOVED: this.inContainerMode = false; this.reinstallAllFlowEntries(); break; default: } // Update our configuration DB updateStaticFlowConfigsOnContainerModeChange(update); } protected abstract class FRMEvent { } private class NodeUpdateEvent extends FRMEvent { private final Node node; private final UpdateType update; public NodeUpdateEvent(UpdateType update, Node node) { this.update = update; this.node = node; } public UpdateType getUpdateType() { return update; } public Node getNode() { return node; } } private class ErrorReportedEvent extends FRMEvent { private final long rid; private final Node node; private final Object error; public ErrorReportedEvent(long rid, Node node, Object error) { this.rid = rid; this.node = node; this.error = error; } public long getRequestId() { return rid; } public Object getError() { return error; } public Node getNode() { return node; } } private class WorkOrderEvent extends FRMEvent { private FlowEntryDistributionOrder fe; private FlowEntryInstall newEntry; /** * @param fe * @param newEntry */ WorkOrderEvent(FlowEntryDistributionOrder fe, FlowEntryInstall newEntry) { this.fe = fe; this.newEntry = newEntry; } /** * @return the fe */ public FlowEntryDistributionOrder getFe() { return fe; } /** * @return the newEntry */ public FlowEntryInstall getNewEntry() { return newEntry; } } private class WorkStatusCleanup extends FRMEvent { private FlowEntryDistributionOrder fe; /** * @param fe */ WorkStatusCleanup(FlowEntryDistributionOrder fe) { this.fe = fe; } /** * @return the fe */ public FlowEntryDistributionOrder getFe() { return fe; } } /* * OSGI COMMANDS */ @Override public String getHelp() { 
StringBuffer help = new StringBuffer(); help.append("---FRM Matrix Application---\n"); help.append("\t printMatrixData - Prints the Matrix Configs\n"); help.append("\t addMatrixConfig <name> <regex>\n"); help.append("\t delMatrixConfig <name>\n"); help.append("\t useMatrixConfig <name>\n"); return help.toString(); } public void _printMatrixData(CommandInterpreter ci) { ci.println("Configs : "); ci.println("---------"); ci.println(portGroupConfigs); ci.println("Data : "); ci.println("------"); ci.println(portGroupData); } public void _addMatrixConfig(CommandInterpreter ci) { String name = ci.nextArgument(); String regex = ci.nextArgument(); addPortGroupConfig(name, regex, false); } public void _delMatrixConfig(CommandInterpreter ci) { String name = ci.nextArgument(); delPortGroupConfig(name); } public void _useMatrixConfig(CommandInterpreter ci) { String name = ci.nextArgument(); usePortGroupConfig(name); } public void _arpPunt(CommandInterpreter ci) { String switchId = ci.nextArgument(); long swid = HexEncode.stringToLong(switchId); Node node = NodeCreator.createOFNode(swid); installImplicitARPReplyPunt(node); } public void _frmaddflow(CommandInterpreter ci) throws UnknownHostException { Node node = null; String nodeId = ci.nextArgument(); if (nodeId == null) { ci.print("Node id not specified"); return; } try { node = NodeCreator.createOFNode(Long.valueOf(nodeId)); } catch (NumberFormatException e) { ci.print("Node id not a number"); return; } ci.println(this.programmer.addFlow(node, getSampleFlow(node))); } public void _frmremoveflow(CommandInterpreter ci) throws UnknownHostException { Node node = null; String nodeId = ci.nextArgument(); if (nodeId == null) { ci.print("Node id not specified"); return; } try { node = NodeCreator.createOFNode(Long.valueOf(nodeId)); } catch (NumberFormatException e) { ci.print("Node id not a number"); return; } ci.println(this.programmer.removeFlow(node, getSampleFlow(node))); } private Flow getSampleFlow(Node node) throws 
UnknownHostException { NodeConnector port = NodeConnectorCreator.createOFNodeConnector((short) 24, node); NodeConnector oport = NodeConnectorCreator.createOFNodeConnector((short) 30, node); byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc }; byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f }; InetAddress srcIP = InetAddress.getByName("172.28.30.50"); InetAddress dstIP = InetAddress.getByName("171.71.9.52"); InetAddress ipMask = InetAddress.getByName("255.255.255.0"); InetAddress ipMask2 = InetAddress.getByName("255.0.0.0"); short ethertype = EtherTypes.IPv4.shortValue(); short vlan = (short) 27; byte vlanPr = 3; Byte tos = 4; byte proto = IPProtocols.TCP.byteValue(); short src = (short) 55000; short dst = 80; /* * Create a SAL Flow aFlow */ Match match = new Match(); match.setField(MatchType.IN_PORT, port); match.setField(MatchType.DL_SRC, srcMac); match.setField(MatchType.DL_DST, dstMac); match.setField(MatchType.DL_TYPE, ethertype); match.setField(MatchType.DL_VLAN, vlan); match.setField(MatchType.DL_VLAN_PR, vlanPr); match.setField(MatchType.NW_SRC, srcIP, ipMask); match.setField(MatchType.NW_DST, dstIP, ipMask2); match.setField(MatchType.NW_TOS, tos); match.setField(MatchType.NW_PROTO, proto); match.setField(MatchType.TP_SRC, src); match.setField(MatchType.TP_DST, dst); List<Action> actions = new ArrayList<Action>(); actions.add(new Output(oport)); actions.add(new PopVlan()); actions.add(new Flood()); actions.add(new Controller()); return new Flow(match, actions); } @Override public Status saveConfiguration() { return saveConfig(); } public void _frmNodeFlows(CommandInterpreter ci) { String nodeId = ci.nextArgument(); Node node = Node.fromString(nodeId); if (node == null) { ci.println("frmNodeFlows <node> [verbose]"); return; } boolean verbose = false; String verboseCheck = ci.nextArgument(); if (verboseCheck != null) { verbose = verboseCheck.equals("true"); } if 
(!nodeFlows.containsKey(node)) { return; } // Dump per node database for (FlowEntryInstall entry : nodeFlows.get(node)) { if (!verbose) { ci.println(node + " " + installedSwView.get(entry).getFlowName()); } else { ci.println(node + " " + installedSwView.get(entry).toString()); } } } public void _frmGroupFlows(CommandInterpreter ci) { String group = ci.nextArgument(); if (group == null) { ci.println("frmGroupFlows <group> [verbose]"); return; } boolean verbose = false; String verboseCheck = ci.nextArgument(); if (verboseCheck != null) { verbose = verboseCheck.equalsIgnoreCase("true"); } if (!groupFlows.containsKey(group)) { return; } // Dump per node database ci.println("Group " + group + ":\n"); for (FlowEntryInstall flowEntry : groupFlows.get(group)) { if (!verbose) { ci.println(flowEntry.getNode() + " " + flowEntry.getFlowName()); } else { ci.println(flowEntry.getNode() + " " + flowEntry.toString()); } } } @Override public void flowRemoved(Node node, Flow flow) { log.trace("Received flow removed notification on {} for {}", node, flow); // For flow entry identification, only node, match and priority matter FlowEntryInstall test = new FlowEntryInstall(new FlowEntry("", "", flow, node), null); FlowEntryInstall installedEntry = this.installedSwView.get(test); if (installedEntry == null) { log.trace("Entry is not known to us"); return; } // Update Static flow status Integer key = 0; FlowConfig target = null; for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { FlowConfig conf = entry.getValue(); if (conf.isByNameAndNodeIdEqual(installedEntry.getFlowName(), node)) { key = entry.getKey(); target = conf; break; } } if (target != null) { // Update Configuration database target.toggleInstallation(); target.setStatus(SUCCESS); staticFlows.put(key, target); } // Update software views this.updateLocalDatabase(installedEntry, false); } @Override public void flowErrorReported(Node node, long rid, Object err) { log.trace("Got error {} for message rid {} from 
node {}", new Object[] { err, rid, node }); pendingEvents.offer(new ErrorReportedEvent(rid, node, err)); } private void processErrorEvent(ErrorReportedEvent event) { Node node = event.getNode(); long rid = event.getRequestId(); Object error = event.getError(); String errorString = (error == null) ? "Not provided" : error.toString(); /* * If this was for a flow install, remove the corresponding entry from * the software view. If it was a Looking for the rid going through the * software database. TODO: A more efficient rid <-> FlowEntryInstall * mapping will have to be added in future */ FlowEntryInstall target = null; for (FlowEntryInstall index : nodeFlows.get(node)) { FlowEntryInstall entry = installedSwView.get(index); if (entry.getRequestId() == rid) { target = entry; break; } } if (target != null) { // This was a flow install, update database this.updateLocalDatabase(target, false); } // Notify listeners if (frmAware != null) { synchronized (frmAware) { for (IForwardingRulesManagerAware frma : frmAware) { try { frma.requestFailed(rid, errorString); } catch (Exception e) { log.warn("Failed to notify {}", frma); } } } } } @Override public Status solicitStatusResponse(Node node, boolean blocking) { Status rv = new Status(StatusCode.INTERNALERROR); if (this.programmer != null) { if (blocking) { rv = programmer.syncSendBarrierMessage(node); } else { rv = programmer.asyncSendBarrierMessage(node); } } return rv; } public void unsetIConnectionManager(IConnectionManager s) { if (s == this.connectionManager) { this.connectionManager = null; } } public void setIConnectionManager(IConnectionManager s) { this.connectionManager = s; } @Override public void entryCreated(Object key, String cacheName, boolean originLocal) { /* * Do nothing */ } @Override public void entryUpdated(Object key, Object new_value, String cacheName, boolean originLocal) { if (originLocal) { /* * Local updates are of no interest */ return; } if (cacheName.equals(WORKORDERCACHE)) { logsync.trace("Got a 
WorkOrderCacheUpdate for {}", key); /* * This is the case of one workOrder becoming available, so we need * to dispatch the work to the appropriate handler */ FlowEntryDistributionOrder fe = (FlowEntryDistributionOrder) key; FlowEntryInstall fei = fe.getEntry(); if (fei == null) { return; } Node n = fei.getNode(); if (connectionManager.isLocal(n)) { logsync.trace("workOrder for fe {} processed locally", fe); // I'm the controller in charge for the request, queue it for // processing pendingEvents.offer(new WorkOrderEvent(fe, (FlowEntryInstall) new_value)); } } else if (cacheName.equals(WORKSTATUSCACHE)) { logsync.trace("Got a WorkStatusCacheUpdate for {}", key); /* * This is the case of one workOrder being completed and a status * returned */ FlowEntryDistributionOrder fe = (FlowEntryDistributionOrder) key; /* * Check if the order was initiated by this controller in that case * we need to actually look at the status returned */ if (fe.getRequestorController() .equals(clusterContainerService.getMyAddress())) { FlowEntryDistributionOrderFutureTask fet = workMonitor.get(fe); if (fet != null) { logsync.trace("workStatus response is for us {}", fe); // Signal we got the status fet.gotStatus(fe, workStatus.get(fe)); pendingEvents.offer(new WorkStatusCleanup(fe)); } } } } @Override public void entryDeleted(Object key, String cacheName, boolean originLocal) { /* * Do nothing */ } }
opendaylight/forwardingrulesmanager/implementation/src/main/java/org/opendaylight/controller/forwardingrulesmanager/internal/ForwardingRulesManager.java
/* * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 which accompanies this distribution, * and is available at http://www.eclipse.org/legal/epl-v10.html */ package org.opendaylight.controller.forwardingrulesmanager.internal; import java.io.FileNotFoundException; import java.io.IOException; import java.io.ObjectInputStream; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; import org.eclipse.osgi.framework.console.CommandInterpreter; import org.eclipse.osgi.framework.console.CommandProvider; import org.opendaylight.controller.clustering.services.CacheConfigException; import org.opendaylight.controller.clustering.services.CacheExistException; import org.opendaylight.controller.clustering.services.ICacheUpdateAware; import org.opendaylight.controller.clustering.services.IClusterContainerServices; import org.opendaylight.controller.clustering.services.IClusterServices; import org.opendaylight.controller.configuration.IConfigurationContainerAware; import org.opendaylight.controller.connectionmanager.IConnectionManager; import org.opendaylight.controller.forwardingrulesmanager.FlowConfig; import org.opendaylight.controller.forwardingrulesmanager.FlowEntry; import org.opendaylight.controller.forwardingrulesmanager.FlowEntryInstall; 
import org.opendaylight.controller.forwardingrulesmanager.IForwardingRulesManager; import org.opendaylight.controller.forwardingrulesmanager.IForwardingRulesManagerAware; import org.opendaylight.controller.forwardingrulesmanager.PortGroup; import org.opendaylight.controller.forwardingrulesmanager.PortGroupChangeListener; import org.opendaylight.controller.forwardingrulesmanager.PortGroupConfig; import org.opendaylight.controller.forwardingrulesmanager.PortGroupProvider; import org.opendaylight.controller.forwardingrulesmanager.implementation.data.FlowEntryDistributionOrder; import org.opendaylight.controller.sal.action.Action; import org.opendaylight.controller.sal.action.ActionType; import org.opendaylight.controller.sal.action.Controller; import org.opendaylight.controller.sal.action.Flood; import org.opendaylight.controller.sal.action.Output; import org.opendaylight.controller.sal.action.PopVlan; import org.opendaylight.controller.sal.core.ContainerFlow; import org.opendaylight.controller.sal.core.IContainer; import org.opendaylight.controller.sal.core.IContainerListener; import org.opendaylight.controller.sal.core.Node; import org.opendaylight.controller.sal.core.NodeConnector; import org.opendaylight.controller.sal.core.Property; import org.opendaylight.controller.sal.core.UpdateType; import org.opendaylight.controller.sal.flowprogrammer.Flow; import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerListener; import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerService; import org.opendaylight.controller.sal.match.Match; import org.opendaylight.controller.sal.match.MatchType; import org.opendaylight.controller.sal.utils.EtherTypes; import org.opendaylight.controller.sal.utils.GlobalConstants; import org.opendaylight.controller.sal.utils.HexEncode; import org.opendaylight.controller.sal.utils.IObjectReader; import org.opendaylight.controller.sal.utils.IPProtocols; import org.opendaylight.controller.sal.utils.NodeConnectorCreator; 
import org.opendaylight.controller.sal.utils.NodeCreator; import org.opendaylight.controller.sal.utils.ObjectReader; import org.opendaylight.controller.sal.utils.ObjectWriter; import org.opendaylight.controller.sal.utils.Status; import org.opendaylight.controller.sal.utils.StatusCode; import org.opendaylight.controller.switchmanager.IInventoryListener; import org.opendaylight.controller.switchmanager.ISwitchManager; import org.opendaylight.controller.switchmanager.ISwitchManagerAware; import org.opendaylight.controller.switchmanager.Subnet; import org.osgi.framework.BundleContext; import org.osgi.framework.FrameworkUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Class that manages forwarding rule installation and removal per container of * the network. It also maintains the central repository of all the forwarding * rules installed on the network nodes. */ public class ForwardingRulesManager implements IForwardingRulesManager, PortGroupChangeListener, IContainerListener, ISwitchManagerAware, IConfigurationContainerAware, IInventoryListener, IObjectReader, ICacheUpdateAware, CommandProvider, IFlowProgrammerListener { private static final String NODEDOWN = "Node is Down"; private static final String SUCCESS = StatusCode.SUCCESS.toString(); private static final Logger log = LoggerFactory.getLogger(ForwardingRulesManager.class); private static final String PORTREMOVED = "Port removed"; private static final Logger logsync = LoggerFactory.getLogger("FRMsync"); private String frmFileName; private String portGroupFileName; private ConcurrentMap<Integer, FlowConfig> staticFlows; private ConcurrentMap<Integer, Integer> staticFlowsOrdinal; private ConcurrentMap<String, PortGroupConfig> portGroupConfigs; private ConcurrentMap<PortGroupConfig, Map<Node, PortGroup>> portGroupData; private ConcurrentMap<String, Object> TSPolicies; private boolean inContainerMode; // being used by global instance only private boolean stopping; /* * Flow database. 
It's the software view of what was requested to install
     * and what is installed on the switch. It is indexed by the entry itself.
     * The entry's hashcode resumes the network node index, the flow's priority
     * and the flow's match. The value element is a class which contains the
     * flow entry pushed by the applications modules and the respective
     * container flow merged version. In absence of container flows, the two
     * flow entries are the same.
     */
    // Application-requested flow entries, keyed by the entry itself
    private ConcurrentMap<FlowEntry, FlowEntry> originalSwView;
    // Container-flow merged entries actually programmed on the switches
    private ConcurrentMap<FlowEntryInstall, FlowEntryInstall> installedSwView;

    /*
     * Per node and per group indexing
     */
    private ConcurrentMap<Node, List<FlowEntryInstall>> nodeFlows;
    private ConcurrentMap<String, List<FlowEntryInstall>> groupFlows;

    /*
     * Inactive flow list. This is for the global instance of FRM It will
     * contain all the flow entries which were installed on the global container
     * when the first container is created.
     */
    private ConcurrentMap<FlowEntry, FlowEntry> inactiveFlows;

    private IContainer container;
    private Set<IForwardingRulesManagerAware> frmAware =
        Collections.synchronizedSet(new HashSet<IForwardingRulesManagerAware>());
    private PortGroupProvider portGroupProvider;
    private IFlowProgrammerService programmer;
    private IClusterContainerServices clusterContainerService = null;
    private ISwitchManager switchManager;
    private Thread frmEventHandler;
    protected BlockingQueue<FRMEvent> pendingEvents;

    // Distributes FRM programming in the cluster
    private IConnectionManager connectionManager;

    /*
     * Name clustered caches used to support FRM entry distribution these are by
     * necessity non-transactional as long as need to be able to synchronize
     * states also while a transaction is in progress
     */
    static final String WORKORDERCACHE = "frm.workOrder";
    static final String WORKSTATUSCACHE = "frm.workStatus";

    /*
     * Data structure responsible for distributing the FlowEntryInstall requests
     * in the cluster. The key value is entry that is being either Installed or
     * Updated or Delete. The value field is the same of the key value in case
     * of Installation or Deletion, it's the new entry in case of Modification,
     * this because the clustering caches don't allow null values.
     *
     * The logic behind this data structure is that the controller that initiate
     * the request will place the order here, someone will pick it and then will
     * remove from this data structure because is being served.
     *
     * TODO: We need to have a way to cleanup this data structure if entries are
     * not picked by anyone, which is always a case can happen especially on
     * Node disconnect cases.
     */
    private ConcurrentMap<FlowEntryDistributionOrder, FlowEntryInstall> workOrder;

    /*
     * Data structure responsible for retrieving the results of the workOrder
     * submitted to the cluster.
     *
     * The logic behind this data structure is that the controller that has
     * executed the order will then place the result in workStatus signaling
     * that there was a success or a failure.
     *
     * TODO: The workStatus entries need to have a lifetime associated in case
     * of requestor controller leaving the cluster.
     */
    private ConcurrentMap<FlowEntryDistributionOrder, Status> workStatus;

    /*
     * Local Map used to hold the Future which a caller can use to monitor for
     * completion
     */
    private ConcurrentMap<FlowEntryDistributionOrder, FlowEntryDistributionOrderFutureTask> workMonitor =
            new ConcurrentHashMap<FlowEntryDistributionOrder, FlowEntryDistributionOrderFutureTask>();

    /*
     * Create an executor pool to create the distributionOrder, this is a stop
     * gap solution caused by an issue with non-transactional caches in the
     * implementation we use, being currently worked on. It has been noticed in
     * fact that when non-transactional caches are being used sometime the key
     * are no distributed to all the nodes properly. To workaround the issue
     * transactional caches are being used, but there was a reason for using
     * non-transactional caches to start with, in fact we needed to be able in
     * the context of a northbound transaction to program the FRM entries
     * irrespective of the fact that transaction would commit or no else we
     * would not be able to achieve the entry programming and implement the
     * scheme for recovery from network element failures. Bottom line, now in
     * order to make sure an update on a transactional cache goes out while in a
     * transaction that need to be initiated by a different thread.
     */
    private ExecutorService executor;

    /**
     * Callable that publishes a FlowEntryInstall work order into the clustered
     * workOrder cache so the controller owning the target node can execute it.
     * Returns a Future the caller can use to wait for the remote result.
     */
    class DistributeOrderCallable implements Callable<Future<Status>> {
        // Entry being installed/updated/removed
        private FlowEntryInstall e;
        // New entry; meaningful only for UpdateType.CHANGED
        private FlowEntryInstall u;
        // Kind of operation requested
        private UpdateType t;

        DistributeOrderCallable(FlowEntryInstall e, FlowEntryInstall u, UpdateType t) {
            this.e = e;
            this.u = u;
            this.t = t;
        }

        @Override
        public Future<Status> call() throws Exception {
            if (e == null || t == null) {
                logsync.error("Unexpected null Entry up update type");
                return null;
            }
            // Create the work order and distribute it
            FlowEntryDistributionOrder fe =
                    new FlowEntryDistributionOrder(e, t, clusterContainerService.getMyAddress());
            // First create the monitor job
            FlowEntryDistributionOrderFutureTask ret = new FlowEntryDistributionOrderFutureTask(fe);
            logsync.trace("Node {} not local so sending fe {}", e.getNode(), fe);
            // Register the monitor BEFORE publishing the order, so the status
            // update cannot arrive for an unknown order
            workMonitor.put(fe, ret);
            if (t.equals(UpdateType.CHANGED)) {
                // Then distribute the work (caches don't allow null values, so
                // the new entry is carried as the map value)
                workOrder.put(fe, u);
            } else {
                // Then distribute the work
                workOrder.put(fe, e);
            }
            logsync.trace("WorkOrder requested");
            // Now create an Handle to monitor the execution of the operation
            return ret;
        }
    }

    /**
     * @param e
     *            Entry being installed/updated/removed
     * @param u
     *            New entry will be placed after the update operation.
Valid
     *            only for UpdateType.CHANGED, null for all the other cases
     * @param t
     *            Type of update
     * @return a Future object for monitoring the progress of the result, or
     *         null in case the processing should take place locally
     */
    private Future<Status> distributeWorkOrder(FlowEntryInstall e, FlowEntryInstall u, UpdateType t) {
        // A null entry it's an unexpected condition, anyway it's safe to keep
        // the handling local
        if (e == null) {
            return null;
        }

        Node n = e.getNode();
        if (!connectionManager.isLocal(n)) {
            // The node is mastered by another controller: publish the order
            // through the executor (see executor field comment for why a
            // separate thread is required with transactional caches)
            Callable<Future<Status>> worker = new DistributeOrderCallable(e, u, t);
            if (worker != null) {
                Future<Future<Status>> workerRes = this.executor.submit(worker);
                try {
                    return workerRes.get();
                } catch (InterruptedException e1) {
                    // we where interrupted, not a big deal.
                    return null;
                } catch (ExecutionException e1) {
                    logsync.error(
                            "We got an execution exception {} we cannot much, so returning we don't have nothing to wait for",
                            e);
                    return null;
                }
            }
        }

        logsync.trace("LOCAL Node {} so processing Entry:{} UpdateType:{}", n, e, t);
        return null;
    }

    /**
     * Adds a flow entry onto the network node It runs various validity checks
     * and derive the final container flows merged entries that will be
     * attempted to be installed
     *
     * @param flowEntry
     *            the original flow entry application requested to add
     * @param async
     *            the flag indicating if this is a asynchronous request
     * @return the status of this request. In case of asynchronous call, it will
     *         contain the unique id assigned to this request
     */
    private Status addEntry(FlowEntry flowEntry, boolean async) {

        // Sanity Check
        if (flowEntry == null || flowEntry.getNode() == null) {
            String msg = "Invalid FlowEntry";
            String logMsg = msg + ": {}";
            log.warn(logMsg, flowEntry);
            return new Status(StatusCode.NOTACCEPTABLE, msg);
        }

        /*
         * Derive the container flow merged entries to install In presence of N
         * container flows, we may end up with N different entries to install...
         */
        List<FlowEntryInstall> toInstallList = deriveInstallEntries(flowEntry.clone(), container.getContainerFlows());

        // Container Flow conflict Check
        if (toInstallList.isEmpty()) {
            String msg = "Flow Entry conflicts with all Container Flows";
            String logMsg = msg + ": {}";
            log.warn(logMsg, flowEntry);
            return new Status(StatusCode.CONFLICT, msg);
        }

        // Derive the list of entries good to be installed
        List<FlowEntryInstall> toInstallSafe = new ArrayList<FlowEntryInstall>();
        for (FlowEntryInstall entry : toInstallList) {
            // Conflict Check: Verify new entry would not overwrite existing
            // ones
            if (this.installedSwView.containsKey(entry)) {
                log.warn("Operation Rejected: A flow with same match and priority exists on the target node");
                log.trace("Aborting to install {}", entry);
                continue;
            }
            toInstallSafe.add(entry);
        }

        // Declare failure if all the container flow merged entries clash with
        // existing entries
        if (toInstallSafe.size() == 0) {
            String msg = "A flow with same match and priority exists on the target node";
            String logMsg = msg + ": {}";
            log.warn(logMsg, flowEntry);
            return new Status(StatusCode.CONFLICT, msg);
        }

        // Try to install an entry at the time
        Status error = new Status(null, null);
        Status succeded = null;
        boolean oneSucceded = false;
        for (FlowEntryInstall installEntry : toInstallSafe) {

            // Install and update database
            Status ret = addEntriesInternal(installEntry, async);

            if (ret.isSuccess()) {
                oneSucceded = true;
                /*
                 * The first successful status response will be returned For the
                 * asynchronous call, we can discard the container flow
                 * complication for now and assume we will always deal with one
                 * flow only per request
                 */
                succeded = ret;
            } else {
                error = ret;
                log.warn("Failed to install the entry: {}. The failure is: {}", installEntry, ret.getDescription());
            }
        }

        return (oneSucceded) ? succeded : error;
    }

    /**
     * Given a flow entry and the list of container flows, it returns the list
     * of container flow merged flow entries good to be installed on this
     * container.
If the list of container flows is null or empty, the install * entry list will contain only one entry, the original flow entry. If the * flow entry is congruent with all the N container flows, then the output * install entry list will contain N entries. If the output list is empty, * it means the passed flow entry conflicts with all the container flows. * * @param cFlowList * The list of container flows * @return the list of container flow merged entries good to be installed on * this container */ private List<FlowEntryInstall> deriveInstallEntries(FlowEntry request, List<ContainerFlow> cFlowList) { List<FlowEntryInstall> toInstallList = new ArrayList<FlowEntryInstall>(1); if (container.getContainerFlows() == null || container.getContainerFlows().isEmpty()) { // No container flows => entry good to be installed unchanged toInstallList.add(new FlowEntryInstall(request.clone(), null)); } else { // Create the list of entries to be installed. If the flow entry is // not congruent with any container flow, no install entries will be // created for (ContainerFlow cFlow : container.getContainerFlows()) { if (cFlow.allowsFlow(request.getFlow())) { toInstallList.add(new FlowEntryInstall(request.clone(), cFlow)); } } } return toInstallList; } /** * Modify a flow entry with a new one It runs various validity check and * derive the final container flows merged flow entries to work with * * @param currentFlowEntry * @param newFlowEntry * @param async * the flag indicating if this is a asynchronous request * @return the status of this request. 
In case of asynchronous call, it will * contain the unique id assigned to this request */ private Status modifyEntry(FlowEntry currentFlowEntry, FlowEntry newFlowEntry, boolean async) { Status retExt; // Sanity checks if (currentFlowEntry == null || currentFlowEntry.getNode() == null || newFlowEntry == null || newFlowEntry.getNode() == null) { String msg = "Modify: Invalid FlowEntry"; String logMsg = msg + ": {} or {}"; log.warn(logMsg, currentFlowEntry, newFlowEntry); return new Status(StatusCode.NOTACCEPTABLE, msg); } if (!currentFlowEntry.getNode().equals(newFlowEntry.getNode()) || !currentFlowEntry.getFlowName().equals(newFlowEntry.getFlowName())) { String msg = "Modify: Incompatible Flow Entries"; String logMsg = msg + ": {} and {}"; log.warn(logMsg, currentFlowEntry, newFlowEntry); return new Status(StatusCode.NOTACCEPTABLE, msg); } // Equality Check if (currentFlowEntry.getFlow().equals(newFlowEntry.getFlow())) { String msg = "Modify skipped as flows are the same"; String logMsg = msg + ": {} and {}"; log.debug(logMsg, currentFlowEntry, newFlowEntry); return new Status(StatusCode.SUCCESS, msg); } /* * Conflict Check: Verify the new entry would not conflict with an * existing one. This is a loose check on the previous original flow * entry requests. 
No check on the container flow merged flow entries * (if any) yet */ FlowEntry sameMatchOriginalEntry = originalSwView.get(newFlowEntry); if (sameMatchOriginalEntry != null && !sameMatchOriginalEntry.equals(currentFlowEntry)) { String msg = "Operation Rejected: Another flow with same match and priority exists on the target node"; String logMsg = msg + ": {}"; log.warn(logMsg, currentFlowEntry); return new Status(StatusCode.CONFLICT, msg); } // Derive the installed and toInstall entries List<FlowEntryInstall> installedList = deriveInstallEntries(currentFlowEntry.clone(), container.getContainerFlows()); List<FlowEntryInstall> toInstallList = deriveInstallEntries(newFlowEntry.clone(), container.getContainerFlows()); if (toInstallList.isEmpty()) { String msg = "Modify Operation Rejected: The new entry conflicts with all the container flows"; String logMsg = msg + ": {}"; log.warn(logMsg, newFlowEntry); log.warn(msg); return new Status(StatusCode.CONFLICT, msg); } /* * If the two list sizes differ, it means the new flow entry does not * satisfy the same number of container flows the current entry does. * This is only possible when the new entry and current entry have * different match. In this scenario the modification would ultimately * be handled as a remove and add operations in the protocol plugin. * * Also, if any of the new flow entries would clash with an existing * one, we cannot proceed with the modify operation, because it would * fail for some entries and leave stale entries on the network node. * Modify path can be taken only if it can be performed completely, for * all entries. 
* * So, for the above two cases, to simplify, let's decouple the modify * in: 1) remove current entries 2) install new entries */ Status succeeded = null; boolean decouple = false; if (installedList.size() != toInstallList.size()) { log.info("Modify: New flow entry does not satisfy the same " + "number of container flows as the original entry does"); decouple = true; } List<FlowEntryInstall> toInstallSafe = new ArrayList<FlowEntryInstall>(); for (FlowEntryInstall installEntry : toInstallList) { /* * Conflict Check: Verify the new entry would not overwrite another * existing one */ FlowEntryInstall sameMatchEntry = installedSwView.get(installEntry); if (sameMatchEntry != null && !sameMatchEntry.getOriginal().equals(currentFlowEntry)) { log.info("Modify: new container flow merged flow entry clashes with existing flow"); decouple = true; } else { toInstallSafe.add(installEntry); } } if (decouple) { // Remove current entries for (FlowEntryInstall currEntry : installedList) { this.removeEntryInternal(currEntry, async); } // Install new entries for (FlowEntryInstall newEntry : toInstallSafe) { succeeded = this.addEntriesInternal(newEntry, async); } } else { /* * The two list have the same size and the entries to install do not * clash with any existing flow on the network node. We assume here * (and might be wrong) that the same container flows that were * satisfied by the current entries are the same that are satisfied * by the new entries. Let's take the risk for now. * * Note: modification has to be complete. If any entry modification * fails, we need to stop, restore the already modified entries, and * declare failure. 
*/ Status retModify = null; int i = 0; int size = toInstallList.size(); while (i < size) { // Modify and update database retModify = modifyEntryInternal(installedList.get(i), toInstallList.get(i), async); if (retModify.isSuccess()) { i++; } else { break; } } // Check if uncompleted modify if (i < size) { log.warn("Unable to perform a complete modify for all the container flows merged entries"); // Restore original entries int j = 0; while (j < i) { log.info("Attempting to restore initial entries"); retExt = modifyEntryInternal(toInstallList.get(i), installedList.get(i), async); if (retExt.isSuccess()) { j++; } else { break; } } // Fatal error, recovery failed if (j < i) { String msg = "Flow recovery failed ! Unrecoverable Error"; log.error(msg); return new Status(StatusCode.INTERNALERROR, msg); } } succeeded = retModify; } /* * The first successful status response will be returned. For the * asynchronous call, we can discard the container flow complication for * now and assume we will always deal with one flow only per request */ return succeeded; } /** * This is the function that modifies the final container flows merged * entries on the network node and update the database. It expects that all * the validity checks are passed * * @param currentEntries * @param newEntries * @param async * the flag indicating if this is a asynchronous request * @return the status of this request. 
In case of asynchronous call, it will
     *         contain the unique id assigned to this request
     */
    private Status modifyEntryInternal(FlowEntryInstall currentEntries, FlowEntryInstall newEntries, boolean async) {
        // If the target node is mastered by another controller, hand the work
        // over the cluster and wait on the returned Future
        Future<Status> futureStatus = distributeWorkOrder(currentEntries, newEntries, UpdateType.CHANGED);
        if (futureStatus != null) {
            Status retStatus = new Status(StatusCode.UNDEFINED);
            try {
                retStatus = futureStatus.get();
            } catch (InterruptedException e) {
                log.error("", e);
            } catch (ExecutionException e) {
                log.error("", e);
            }
            return retStatus;
        } else {
            // Modify the flow on the network node
            Status status = async ? programmer.modifyFlowAsync(currentEntries.getNode(), currentEntries.getInstall()
                    .getFlow(), newEntries.getInstall()
                    .getFlow()) : programmer.modifyFlow(currentEntries.getNode(), currentEntries.getInstall()
                    .getFlow(), newEntries.getInstall()
                    .getFlow());

            if (!status.isSuccess()) {
                log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", newEntries.getInstall(),
                        status.getDescription());
                return status;
            }

            log.trace("Modified {} => {}", currentEntries.getInstall(), newEntries.getInstall());

            // Update DB
            newEntries.setRequestId(status.getRequestId());
            updateLocalDatabase(currentEntries, false);
            updateLocalDatabase(newEntries, true);

            return status;
        }
    }

    /**
     * Remove a flow entry. If the entry is not present in the software view
     * (entry or node not present), it return successfully
     *
     * @param flowEntry
     *            the flow entry to remove
     * @param async
     *            the flag indicating if this is a asynchronous request
     * @return the status of this request. In case of asynchronous call, it will
     *         contain the unique id assigned to this request
     */
    private Status removeEntry(FlowEntry flowEntry, boolean async) {
        Status error = new Status(null, null);

        // Sanity Check
        if (flowEntry == null || flowEntry.getNode() == null) {
            String msg = "Invalid FlowEntry";
            String logMsg = msg + ": {}";
            log.warn(logMsg, flowEntry);
            return new Status(StatusCode.NOTACCEPTABLE, msg);
        }

        // Derive the container flows merged installed entries
        List<FlowEntryInstall> installedList = deriveInstallEntries(flowEntry.clone(), container.getContainerFlows());

        Status succeeded = null;
        boolean atLeastOneRemoved = false;
        for (FlowEntryInstall entry : installedList) {
            if (!installedSwView.containsKey(entry)) {
                String logMsg = "Removal skipped (not present in software view) for flow entry: {}";
                log.debug(logMsg, flowEntry);
                if (installedList.size() == 1) {
                    // If we had only one entry to remove, we are done
                    return new Status(StatusCode.SUCCESS);
                } else {
                    continue;
                }
            }

            // Remove and update DB
            Status ret = removeEntryInternal(entry, async);

            if (!ret.isSuccess()) {
                error = ret;
                log.warn("Failed to remove the entry: {}. The failure is: {}", entry.getInstall(), ret.getDescription());
                if (installedList.size() == 1) {
                    // If we had only one entry to remove, this is fatal failure
                    return error;
                }
            } else {
                succeeded = ret;
                atLeastOneRemoved = true;
            }
        }

        /*
         * No worries if full removal failed. Consistency checker will take care
         * of removing the stale entries later, or adjusting the software
         * database if not in sync with hardware
         */
        return (atLeastOneRemoved) ? succeeded : error;
    }

    /**
     * This is the function that removes the final container flows merged entry
     * from the network node and update the database. It expects that all the
     * validity checks are passed
     *
     * @param entry
     *            the flow entry to remove
     * @param async
     *            the flag indicating if this is a asynchronous request
     * @return the status of this request. In case of asynchronous call, it will
     *         contain the unique id assigned to this request
     */
    private Status removeEntryInternal(FlowEntryInstall entry, boolean async) {
        // Hand over to the owning controller when the node is not local
        Future<Status> futureStatus = distributeWorkOrder(entry, null, UpdateType.REMOVED);
        if (futureStatus != null) {
            Status retStatus = new Status(StatusCode.UNDEFINED);
            try {
                retStatus = futureStatus.get();
            } catch (InterruptedException e) {
                log.error("", e);
            } catch (ExecutionException e) {
                log.error("", e);
            }
            return retStatus;
        } else {
            // Mark the entry to be deleted (for CC just in case we fail)
            entry.toBeDeleted();

            // Remove from node
            Status status = async ? programmer.removeFlowAsync(entry.getNode(), entry.getInstall()
                    .getFlow()) : programmer.removeFlow(entry.getNode(), entry.getInstall()
                    .getFlow());

            if (!status.isSuccess()) {
                log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", entry.getInstall(),
                        status.getDescription());
                return status;
            }
            log.trace("Removed {}", entry.getInstall());

            // Update DB
            updateLocalDatabase(entry, false);

            return status;
        }
    }

    /**
     * This is the function that installs the final container flow merged entry
     * on the network node and updates the database. It expects that all the
     * validity and conflict checks are passed. That means it does not check
     * whether this flow would conflict or overwrite an existing one.
     *
     * @param entry
     *            the flow entry to install
     * @param async
     *            the flag indicating if this is a asynchronous request
     * @return the status of this request. In case of asynchronous call, it will
     *         contain the unique id assigned to this request
     */
    private Status addEntriesInternal(FlowEntryInstall entry, boolean async) {
        // Hand over to the owning controller when the node is not local
        Future<Status> futureStatus = distributeWorkOrder(entry, null, UpdateType.ADDED);
        if (futureStatus != null) {
            Status retStatus = new Status(StatusCode.UNDEFINED);
            try {
                retStatus = futureStatus.get();
            } catch (InterruptedException e) {
                log.error("", e);
            } catch (ExecutionException e) {
                log.error("", e);
            }
            return retStatus;
        } else {
            // Install the flow on the network node
            Status status = async ? programmer.addFlowAsync(entry.getNode(), entry.getInstall()
                    .getFlow()) : programmer.addFlow(entry.getNode(), entry.getInstall()
                    .getFlow());

            if (!status.isSuccess()) {
                log.warn("SDN Plugin failed to program the flow: {}. The failure is: {}", entry.getInstall(),
                        status.getDescription());
                return status;
            }

            log.trace("Added {}", entry.getInstall());

            // Update DB
            entry.setRequestId(status.getRequestId());
            updateLocalDatabase(entry, true);

            return status;
        }
    }

    /**
     * Returns true if the flow conflicts with all the container's flows. This
     * means that if the function returns false, the passed flow entry is
     * congruent with at least one container flow, hence it is good to be
     * installed on this container.
*
     * @param flowEntry
     * @return true if flow conflicts with all the container flows, false
     *         otherwise
     */
    private boolean entryConflictsWithContainerFlows(FlowEntry flowEntry) {
        List<ContainerFlow> cFlowList = container.getContainerFlows();

        // Validity check and avoid unnecessary computation
        // Also takes care of default container where no container flows are
        // present
        if (cFlowList == null || cFlowList.isEmpty()) {
            return false;
        }

        for (ContainerFlow cFlow : cFlowList) {
            if (cFlow.allowsFlow(flowEntry.getFlow())) {
                // Entry is allowed by at least one container flow: good to go
                return false;
            }
        }
        return true;
    }

    /**
     * Updates the software view and the per-node / per-group indexes for the
     * given installed entry.
     */
    private void updateLocalDatabase(FlowEntryInstall entry, boolean add) {
        // Update the software view
        updateSwViewes(entry, add);

        // Update node indexed flow database
        updateNodeFlowsDB(entry, add);

        // Update group indexed flow database
        updateGroupFlowsDB(entry, add);
    }

    /*
     * Update the software views (original and installed entry maps)
     */
    private void updateSwViewes(FlowEntryInstall flowEntries, boolean add) {
        if (add) {
            originalSwView.put(flowEntries.getOriginal(), flowEntries.getOriginal());
            installedSwView.put(flowEntries, flowEntries);
        } else {
            originalSwView.remove(flowEntries.getOriginal());
            installedSwView.remove(flowEntries);
        }
    }

    /*
     * Update the node mapped flows database
     */
    private void updateNodeFlowsDB(FlowEntryInstall flowEntries, boolean add) {
        Node node = flowEntries.getNode();

        List<FlowEntryInstall> nodeIndeces = this.nodeFlows.get(node);
        if (nodeIndeces == null) {
            if (!add) {
                return;
            } else {
                nodeIndeces = new ArrayList<FlowEntryInstall>();
            }
        }

        if (add) {
            nodeIndeces.add(flowEntries);
        } else {
            nodeIndeces.remove(flowEntries);
        }

        // Update cache across cluster
        if (nodeIndeces.isEmpty()) {
            this.nodeFlows.remove(node);
        } else {
            this.nodeFlows.put(node, nodeIndeces);
        }
    }

    /*
     * Update the group name mapped flows database
     */
    private void updateGroupFlowsDB(FlowEntryInstall flowEntries, boolean add) {
        String groupName = flowEntries.getGroupName();

        // Flow may not be part of a group
        if (groupName == null) {
            return;
        }

        List<FlowEntryInstall> indices = this.groupFlows.get(groupName);
        if (indices == null) {
            if (!add) {
                return;
            } else {
                indices = new ArrayList<FlowEntryInstall>();
            }
        }

        if (add) {
            indices.add(flowEntries);
        } else {
            indices.remove(flowEntries);
        }

        // Update cache across cluster
        if (indices.isEmpty()) {
            this.groupFlows.remove(groupName);
        } else {
            this.groupFlows.put(groupName, indices);
        }
    }

    /**
     * Remove a flow entry that has been added previously First checks if the
     * entry is effectively present in the local database
     */
    @SuppressWarnings("unused")
    private Status removeEntry(Node node, String flowName) {
        FlowEntryInstall target = null;

        // Find in database
        for (FlowEntryInstall entry : installedSwView.values()) {
            if (entry.equalsByNodeAndName(node, flowName)) {
                target = entry;
                break;
            }
        }

        // If it is not there, stop any further processing
        if (target == null) {
            return new Status(StatusCode.SUCCESS, "Entry is not present");
        }

        // Remove from node
        Status status = programmer.removeFlow(target.getNode(), target.getInstall().getFlow());

        // Update DB
        if (status.isSuccess()) {
            updateLocalDatabase(target, false);
        } else {
            // log the error
            log.warn("SDN Plugin failed to remove the flow: {}. The failure is: {}", target.getInstall(),
                    status.getDescription());
        }

        return status;
    }

    @Override
    public Status installFlowEntry(FlowEntry flowEntry) {
        Status status;
        if (isContainerModeAllowed(flowEntry)) {
            status = addEntry(flowEntry, false);
        } else {
            String msg = "Controller in container mode: Install Refused";
            String logMsg = msg + ": {}";
            status = new Status(StatusCode.NOTACCEPTABLE, msg);
            log.warn(logMsg, flowEntry);
        }
        return status;
    }

    @Override
    public Status installFlowEntryAsync(FlowEntry flowEntry) {
        Status status;
        if (isContainerModeAllowed(flowEntry)) {
            status = addEntry(flowEntry, true);
        } else {
            String msg = "Controller in container mode: Install Refused";
            status = new Status(StatusCode.NOTACCEPTABLE, msg);
            log.warn(msg);
        }
        return status;
    }

    @Override
    public Status uninstallFlowEntry(FlowEntry flowEntry) {
        Status status;
        if (isContainerModeAllowed(flowEntry)) {
            status = removeEntry(flowEntry, false);
        } else {
            String msg = "Controller in container mode: Uninstall Refused";
            String logMsg = msg + ": {}";
            status = new Status(StatusCode.NOTACCEPTABLE, msg);
            log.warn(logMsg, flowEntry);
        }
        return status;
    }

    @Override
    public Status uninstallFlowEntryAsync(FlowEntry flowEntry) {
        Status status;
        if (isContainerModeAllowed(flowEntry)) {
            status = removeEntry(flowEntry, true);
        } else {
            String msg = "Controller in container mode: Uninstall Refused";
            status = new Status(StatusCode.NOTACCEPTABLE, msg);
            log.warn(msg);
        }
        return status;
    }

    @Override
    public Status modifyFlowEntry(FlowEntry currentFlowEntry, FlowEntry newFlowEntry) {
        Status status = null;
        if (isContainerModeAllowed(currentFlowEntry)) {
            status = modifyEntry(currentFlowEntry, newFlowEntry, false);
        } else {
            String msg = "Controller in container mode: Modify Refused";
            String logMsg = msg + ": {}";
            status = new Status(StatusCode.NOTACCEPTABLE, msg);
            log.warn(logMsg, newFlowEntry);
        }
        return status;
    }

    @Override
    public Status modifyFlowEntryAsync(FlowEntry currentFlowEntry, FlowEntry newFlowEntry) {
        Status status =
null;
        if (isContainerModeAllowed(currentFlowEntry)) {
            status = modifyEntry(currentFlowEntry, newFlowEntry, true);
        } else {
            String msg = "Controller in container mode: Modify Refused";
            status = new Status(StatusCode.NOTACCEPTABLE, msg);
            log.warn(msg);
        }
        return status;
    }

    /**
     * Returns whether the specified flow entry is allowed to be
     * installed/removed/modified based on the current container mode status.
     * This call always returns true in the container instance of forwarding
     * rules manager. It is meant for the global instance only (default
     * container) of forwarding rules manager. Idea is that for assuring
     * container isolation of traffic, flow installation in default container is
     * blocked when in container mode (containers are present). The only flows
     * that are allowed in container mode in the default container are the
     * proactive flows, the ones automatically installed on the network node
     * which forwarding mode has been configured to "proactive". These flows are
     * needed by controller to discover the nodes topology and to discover the
     * attached hosts for some SDN switches.
     *
     * @param flowEntry
     *            The flow entry to be installed/removed/modified
     * @return true if not in container mode or if flowEntry is internally
     *         generated
     */
    private boolean isContainerModeAllowed(FlowEntry flowEntry) {
        return (!inContainerMode) ? true : flowEntry.isInternal();
    }

    @Override
    public Status modifyOrAddFlowEntry(FlowEntry newFlowEntry) {
        /*
         * Run a check on the original entries to decide whether to go with a
         * add or modify method. A loose check means only check against the
         * original flow entry requests and not against the installed flow
         * entries which are the result of the original entry merged with the
         * container flow(s) (if any). The modifyFlowEntry method in presence of
         * conflicts with the Container flows (if any) would revert back to a
         * delete + add pattern
         */
        FlowEntry currentFlowEntry = originalSwView.get(newFlowEntry);

        if (currentFlowEntry != null) {
            return modifyFlowEntry(currentFlowEntry, newFlowEntry);
        } else {
            return installFlowEntry(newFlowEntry);
        }
    }

    @Override
    public Status modifyOrAddFlowEntryAsync(FlowEntry newFlowEntry) {
        /*
         * Run a check on the original entries to decide whether to go with a
         * add or modify method. A loose check means only check against the
         * original flow entry requests and not against the installed flow
         * entries which are the result of the original entry merged with the
         * container flow(s) (if any). The modifyFlowEntry method in presence of
         * conflicts with the Container flows (if any) would revert back to a
         * delete + add pattern
         */
        FlowEntry currentFlowEntry = originalSwView.get(newFlowEntry);

        if (currentFlowEntry != null) {
            return modifyFlowEntryAsync(currentFlowEntry, newFlowEntry);
        } else {
            return installFlowEntryAsync(newFlowEntry);
        }
    }

    @Override
    public Status uninstallFlowEntryGroup(String groupName) {
        if (groupName == null || groupName.isEmpty()) {
            return new Status(StatusCode.BADREQUEST, "Invalid group name");
        }
        if (groupName.equals(FlowConfig.INTERNALSTATICFLOWGROUP)) {
            return new Status(StatusCode.BADREQUEST, "Internal static flows group cannot be deleted through this api");
        }
        if (inContainerMode) {
            String msg = "Controller in container mode: Group Uninstall Refused";
            String logMsg = msg + ": {}";
            log.warn(logMsg, groupName);
            return new Status(StatusCode.NOTACCEPTABLE, msg);
        }
        int toBeRemoved = 0;
        String error = "";
        if (groupFlows.containsKey(groupName)) {
            // Work on a copy: removeEntry mutates the group index as it goes
            List<FlowEntryInstall> list = new ArrayList<FlowEntryInstall>(groupFlows.get(groupName));
            toBeRemoved = list.size();
            for (FlowEntryInstall entry : list) {
                Status status = this.removeEntry(entry.getOriginal(), false);
                if (status.isSuccess()) {
                    toBeRemoved -= 1;
                } else {
                    error =
status.getDescription(); } } } return (toBeRemoved == 0) ? new Status(StatusCode.SUCCESS) : new Status(StatusCode.INTERNALERROR, "Not all the flows were removed: " + error); } @Override public Status uninstallFlowEntryGroupAsync(String groupName) { if (groupName == null || groupName.isEmpty()) { return new Status(StatusCode.BADREQUEST, "Invalid group name"); } if (groupName.equals(FlowConfig.INTERNALSTATICFLOWGROUP)) { return new Status(StatusCode.BADREQUEST, "Static flows group cannot be deleted through this api"); } if (inContainerMode) { String msg = "Controller in container mode: Group Uninstall Refused"; String logMsg = msg + ": {}"; log.warn(logMsg, groupName); return new Status(StatusCode.NOTACCEPTABLE, msg); } if (groupFlows.containsKey(groupName)) { List<FlowEntryInstall> list = new ArrayList<FlowEntryInstall>(groupFlows.get(groupName)); for (FlowEntryInstall entry : list) { this.removeEntry(entry.getOriginal(), true); } } return new Status(StatusCode.SUCCESS); } @Override public boolean checkFlowEntryConflict(FlowEntry flowEntry) { return entryConflictsWithContainerFlows(flowEntry); } /** * Updates all installed flows because the container flow got updated This * is obtained in two phases on per node basis: 1) Uninstall of all flows 2) * Reinstall of all flows This is needed because a new container flows * merged flow may conflict with an existing old container flows merged flow * on the network node */ private void updateFlowsContainerFlow() { Set<FlowEntry> toReInstall = new HashSet<FlowEntry>(); // First remove all installed entries for (ConcurrentMap.Entry<FlowEntryInstall, FlowEntryInstall> entry : installedSwView.entrySet()) { FlowEntryInstall current = entry.getValue(); // Store the original entry toReInstall.add(current.getOriginal()); // Remove the old couples. 
No validity checks to be run, use the // internal remove this.removeEntryInternal(current, false); } // Then reinstall the original entries for (FlowEntry entry : toReInstall) { // Reinstall the original flow entries, via the regular path: new // cFlow merge + validations this.installFlowEntry(entry); } } private void nonClusterObjectCreate() { originalSwView = new ConcurrentHashMap<FlowEntry, FlowEntry>(); installedSwView = new ConcurrentHashMap<FlowEntryInstall, FlowEntryInstall>(); nodeFlows = new ConcurrentHashMap<Node, List<FlowEntryInstall>>(); groupFlows = new ConcurrentHashMap<String, List<FlowEntryInstall>>(); TSPolicies = new ConcurrentHashMap<String, Object>(); staticFlowsOrdinal = new ConcurrentHashMap<Integer, Integer>(); portGroupConfigs = new ConcurrentHashMap<String, PortGroupConfig>(); portGroupData = new ConcurrentHashMap<PortGroupConfig, Map<Node, PortGroup>>(); staticFlows = new ConcurrentHashMap<Integer, FlowConfig>(); inactiveFlows = new ConcurrentHashMap<FlowEntry, FlowEntry>(); } private void registerWithOSGIConsole() { BundleContext bundleContext = FrameworkUtil.getBundle(this.getClass()).getBundleContext(); bundleContext.registerService(CommandProvider.class.getName(), this, null); } @Override public void setTSPolicyData(String policyname, Object o, boolean add) { if (add) { /* Check if this policy already exists */ if (!(TSPolicies.containsKey(policyname))) { TSPolicies.put(policyname, o); } } else { TSPolicies.remove(policyname); } if (frmAware != null) { synchronized (frmAware) { for (IForwardingRulesManagerAware frma : frmAware) { try { frma.policyUpdate(policyname, add); } catch (Exception e) { log.warn("Exception on callback", e); } } } } } @Override public Map<String, Object> getTSPolicyData() { return TSPolicies; } @Override public Object getTSPolicyData(String policyName) { if (TSPolicies.containsKey(policyName)) { return TSPolicies.get(policyName); } else { return null; } } @Override public List<FlowEntry> 
getFlowEntriesForGroup(String policyName) {
        List<FlowEntry> list = new ArrayList<FlowEntry>();
        if (policyName != null && !policyName.trim().isEmpty()) {
            for (Map.Entry<FlowEntry, FlowEntry> entry : this.originalSwView.entrySet()) {
                if (policyName.equals(entry.getKey().getGroupName())) {
                    // Return clones so callers cannot mutate the stored entries
                    list.add(entry.getKey().clone());
                }
            }
        }
        return list;
    }

    // Returns clones of the node-programmed (installed) entries of the group.
    @Override
    public List<FlowEntry> getInstalledFlowEntriesForGroup(String policyName) {
        List<FlowEntry> list = new ArrayList<FlowEntry>();
        if (policyName != null && !policyName.trim().isEmpty()) {
            for (Map.Entry<FlowEntryInstall, FlowEntryInstall> entry : this.installedSwView.entrySet()) {
                if (policyName.equals(entry.getKey().getGroupName())) {
                    list.add(entry.getKey().getInstall().clone());
                }
            }
        }
        return list;
    }

    // Appends an OUTPUT action per port to the named flow on the node, then
    // reprograms it via modifyEntry().
    // NOTE(review): nodeFlows.get(node) is iterated without a null check -- a
    // node absent from the map would NPE here; confirm callers guarantee
    // presence.
    @Override
    public void addOutputPort(Node node, String flowName, List<NodeConnector> portList) {
        for (FlowEntryInstall flow : this.nodeFlows.get(node)) {
            if (flow.getFlowName().equals(flowName)) {
                FlowEntry currentFlowEntry = flow.getOriginal();
                FlowEntry newFlowEntry = currentFlowEntry.clone();
                for (NodeConnector dstPort : portList) {
                    newFlowEntry.getFlow().addAction(new Output(dstPort));
                }
                Status error = modifyEntry(currentFlowEntry, newFlowEntry, false);
                if (error.isSuccess()) {
                    log.info("Ports {} added to FlowEntry {}", portList, flowName);
                } else {
                    log.warn("Failed to add ports {} to Flow entry {}. The failure is: {}", portList,
                            currentFlowEntry.toString(), error.getDescription());
                }
                return;
            }
        }
        log.warn("Failed to add ports to Flow {} on Node {}: Entry Not Found", flowName, node);
    }

    // Removes the OUTPUT actions for the given ports from the named flow.
    // NOTE(review): unlike addOutputPort, the element from nodeFlows is used as
    // a key into installedSwView before reading -- confirm whether
    // addOutputPort should do the same for consistency.
    @Override
    public void removeOutputPort(Node node, String flowName, List<NodeConnector> portList) {
        for (FlowEntryInstall index : this.nodeFlows.get(node)) {
            FlowEntryInstall flow = this.installedSwView.get(index);
            if (flow.getFlowName().equals(flowName)) {
                FlowEntry currentFlowEntry = flow.getOriginal();
                FlowEntry newFlowEntry = currentFlowEntry.clone();
                for (NodeConnector dstPort : portList) {
                    Action action = new Output(dstPort);
                    newFlowEntry.getFlow().removeAction(action);
                }
                Status status = modifyEntry(currentFlowEntry, newFlowEntry, false);
                if (status.isSuccess()) {
                    log.info("Ports {} removed from FlowEntry {}", portList, flowName);
                } else {
                    log.warn("Failed to remove ports {} from Flow entry {}. The failure is: {}", portList,
                            currentFlowEntry.toString(), status.getDescription());
                }
                return;
            }
        }
        log.warn("Failed to remove ports from Flow {} on Node {}: Entry Not Found", flowName, node);
    }

    /*
     * This function assumes the target flow has only one output port
     */
    @Override
    public void replaceOutputPort(Node node, String flowName, NodeConnector outPort) {
        FlowEntry currentFlowEntry = null;
        FlowEntry newFlowEntry = null;
        // Find the flow
        for (FlowEntryInstall index : this.nodeFlows.get(node)) {
            FlowEntryInstall flow = this.installedSwView.get(index);
            if (flow.getFlowName().equals(flowName)) {
                currentFlowEntry = flow.getOriginal();
                break;
            }
        }
        if (currentFlowEntry == null) {
            log.warn("Failed to replace output port for flow {} on node {}: Entry Not Found", flowName, node);
            return;
        }
        // Create a flow copy with the new output port
        newFlowEntry = currentFlowEntry.clone();
        Action target = null;
        for (Action action : newFlowEntry.getFlow().getActions()) {
            if (action.getType() == ActionType.OUTPUT) {
                target = action;
                break;
            }
        }
        newFlowEntry.getFlow().removeAction(target);
newFlowEntry.getFlow().addAction(new Output(outPort));
        // Modify on network node
        Status status = modifyEntry(currentFlowEntry, newFlowEntry, false);
        if (status.isSuccess()) {
            log.info("Output port replaced with {} for flow {} on node {}", outPort, flowName, node);
        } else {
            log.warn("Failed to replace output port for flow {} on node {}. The failure is: {}", flowName, node,
                    status.getDescription());
        }
        return;
    }

    // Returns the port of the first OUTPUT action of the named flow on the
    // node, or null when no such flow/action exists.
    @Override
    public NodeConnector getOutputPort(Node node, String flowName) {
        for (FlowEntryInstall index : this.nodeFlows.get(node)) {
            FlowEntryInstall flow = this.installedSwView.get(index);
            if (flow.getFlowName().equals(flowName)) {
                for (Action action : flow.getOriginal().getFlow().getActions()) {
                    if (action.getType() == ActionType.OUTPUT) {
                        return ((Output) action).getPort();
                    }
                }
            }
        }
        return null;
    }

    // Allocate the clustered caches, then bind the local references to them.
    private void cacheStartup() {
        allocateCaches();
        retrieveCaches();
    }

    // Creates all FRM transactional caches in the cluster container service.
    // Creation failures are logged and swallowed (CacheExistException is
    // expected on restart).
    @SuppressWarnings("deprecation")
    private void allocateCaches() {
        if (this.clusterContainerService == null) {
            log.warn("Un-initialized clusterContainerService, can't create cache");
            return;
        }
        log.debug("Allocating caches for Container {}", container.getName());
        try {
            clusterContainerService.createCache("frm.originalSwView",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache("frm.installedSwView",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache("frm.inactiveFlows",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache("frm.nodeFlows",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache("frm.groupFlows",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache("frm.staticFlows",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache("frm.flowsSaveEvent",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache("frm.staticFlowsOrdinal",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache("frm.portGroupConfigs",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache("frm.portGroupData",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache("frm.TSPolicies",
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache(WORKSTATUSCACHE,
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
            clusterContainerService.createCache(WORKORDERCACHE,
                    EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
        } catch (CacheConfigException cce) {
            log.error("CacheConfigException");
        } catch (CacheExistException cce) {
            log.error("CacheExistException");
        }
    }

    // Binds the local map references to the clustered caches; falls back to
    // plain ConcurrentHashMaps when clustering is unavailable.
    @SuppressWarnings({ "unchecked", "deprecation" })
    private void retrieveCaches() {
        ConcurrentMap<?, ?> map;
        if (this.clusterContainerService == null) {
            log.warn("un-initialized clusterContainerService, can't retrieve cache");
            nonClusterObjectCreate();
            return;
        }
        log.debug("Retrieving Caches for Container {}", container.getName());
        map = clusterContainerService.getCache("frm.originalSwView");
        if (map != null) {
            originalSwView = (ConcurrentMap<FlowEntry, FlowEntry>) map;
        } else {
            log.error("Retrieval of frm.originalSwView cache failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache("frm.installedSwView");
        if (map != null) {
            installedSwView = (ConcurrentMap<FlowEntryInstall, FlowEntryInstall>) map;
        } else {
            log.error("Retrieval of frm.installedSwView cache failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache("frm.inactiveFlows");
        if (map != null) {
            inactiveFlows = (ConcurrentMap<FlowEntry, FlowEntry>) map;
        } else {
            log.error("Retrieval of frm.inactiveFlows cache failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache("frm.nodeFlows");
        if (map != null) {
            nodeFlows = (ConcurrentMap<Node, List<FlowEntryInstall>>) map;
        } else {
            // NOTE(review): this message omits the cache name, unlike its
            // siblings.
            log.error("Retrieval of cache failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache("frm.groupFlows");
        if (map != null) {
            groupFlows = (ConcurrentMap<String, List<FlowEntryInstall>>) map;
        } else {
            log.error("Retrieval of frm.groupFlows cache failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache("frm.staticFlows");
        if (map != null) {
            staticFlows = (ConcurrentMap<Integer, FlowConfig>) map;
        } else {
            log.error("Retrieval of frm.staticFlows cache failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache("frm.staticFlowsOrdinal");
        if (map != null) {
            staticFlowsOrdinal = (ConcurrentMap<Integer, Integer>) map;
        } else {
            log.error("Retrieval of frm.staticFlowsOrdinal cache failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache("frm.portGroupConfigs");
        if (map != null) {
            portGroupConfigs = (ConcurrentMap<String, PortGroupConfig>) map;
        } else {
            log.error("Retrieval of frm.portGroupConfigs cache failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache("frm.portGroupData");
        if (map != null) {
            portGroupData = (ConcurrentMap<PortGroupConfig, Map<Node, PortGroup>>) map;
        } else {
            log.error("Retrieval of frm.portGroupData allocation failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache("frm.TSPolicies");
        if (map != null) {
            TSPolicies = (ConcurrentMap<String, Object>) map;
        } else {
            log.error("Retrieval of frm.TSPolicies cache failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache(WORKORDERCACHE);
        if (map != null) {
            workOrder = (ConcurrentMap<FlowEntryDistributionOrder, FlowEntryInstall>) map;
        } else {
            log.error("Retrieval of " + WORKORDERCACHE + " cache failed for Container {}", container.getName());
        }
        map = clusterContainerService.getCache(WORKSTATUSCACHE);
        if (map != null) {
            workStatus = (ConcurrentMap<FlowEntryDistributionOrder, Status>) map;
        } else
{
            log.error("Retrieval of " + WORKSTATUSCACHE + " cache failed for Container {}", container.getName());
        }
    }

    // Flow name has to be unique on per node id basis
    private boolean flowConfigExists(FlowConfig config) {
        for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            if (entry.getValue().isByNameAndNodeIdEqual(config)) {
                return true;
            }
        }
        return false;
    }

    // Public entry point for adding a static flow: validates the configuration
    // then delegates to addStaticFlowInternal with restore=false.
    @Override
    public Status addStaticFlow(FlowConfig config) {
        // Configuration object validation
        Status status = config.validate(container);
        if (!status.isSuccess()) {
            log.warn("Invalid Configuration for flow {}. The failure is {}", config, status.getDescription());
            String error = "Invalid Configuration (" + status.getDescription() + ")";
            config.setStatus(error);
            return new Status(StatusCode.BADREQUEST, error);
        }
        return addStaticFlowInternal(config, false);
    }

    /**
     * Private method to add a static flow configuration which does not run any
     * validation on the passed FlowConfig object. If restore is set to true,
     * configuration is stored in configuration database regardless the
     * installation on the network node was successful. This is useful at boot
     * when static flows are present in startup configuration and are read
     * before the switches connects.
     *
     * @param config
     *            The static flow configuration
     * @param restore
     *            if true, the configuration is stored regardless the
     *            installation on the network node was successful
     * @return The status of this request
     */
    private Status addStaticFlowInternal(FlowConfig config, boolean restore) {
        boolean multipleFlowPush = false;
        String error;
        Status status;
        config.setStatus(SUCCESS);
        // Presence check
        if (flowConfigExists(config)) {
            error = "Entry with this name on specified switch already exists";
            log.warn("Entry with this name on specified switch already exists: {}", config);
            config.setStatus(error);
            return new Status(StatusCode.CONFLICT, error);
        }
        // A missing ingress port plus a port-group name means a multi-flow
        // (PortGroup) push; the group name must match a configured port group.
        if ((config.getIngressPort() == null) && config.getPortGroup() != null) {
            for (String portGroupName : portGroupConfigs.keySet()) {
                if (portGroupName.equalsIgnoreCase(config.getPortGroup())) {
                    multipleFlowPush = true;
                    break;
                }
            }
            if (!multipleFlowPush) {
                log.warn("Invalid Configuration(Invalid PortGroup Name) for flow {}", config);
                error = "Invalid Configuration (Invalid PortGroup Name)";
                config.setStatus(error);
                return new Status(StatusCode.BADREQUEST, error);
            }
        }
        /*
         * If requested program the entry in hardware first before updating the
         * StaticFlow DB
         */
        if (!multipleFlowPush) {
            // Program hw
            if (config.installInHw()) {
                FlowEntry entry = config.getFlowEntry();
                status = this.installFlowEntry(entry);
                if (!status.isSuccess()) {
                    config.setStatus(status.getDescription());
                    if (!restore) {
                        return status;
                    }
                }
            }
        }
        /*
         * When the control reaches this point, either of the following
         * conditions is true 1. This is a single entry configuration (non
         * PortGroup) and the hardware installation is successful 2. This is a
         * multiple entry configuration (PortGroup) and hardware installation is
         * NOT done directly on this event. 3. The User prefers to retain the
         * configuration in Controller and skip hardware installation.
         *
         * Hence it is safe to update the StaticFlow DB at this point.
         *
         * Note : For the case of PortGrouping, it is essential to have this DB
         * populated before the PortGroupListeners can query for the DB
         * triggered using portGroupChanged event...
         */
        // NOTE(review): assumes key 0 is always seeded in staticFlowsOrdinal;
        // a missing entry would NPE on the unboxing below -- confirm init path.
        Integer ordinal = staticFlowsOrdinal.get(0);
        staticFlowsOrdinal.put(0, ++ordinal);
        staticFlows.put(ordinal, config);
        if (multipleFlowPush) {
            PortGroupConfig pgconfig = portGroupConfigs.get(config.getPortGroup());
            Map<Node, PortGroup> existingData = portGroupData.get(pgconfig);
            if (existingData != null) {
                portGroupChanged(pgconfig, existingData, true);
            }
        }
        return new Status(StatusCode.SUCCESS);
    }

    // Pushes the not-yet-successful static flows configured for a node that
    // just became available.
    private void addStaticFlowsToSwitch(Node node) {
        for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            FlowConfig config = entry.getValue();
            if (config.isPortGroupEnabled()) {
                continue;
            }
            if (config.getNode().equals(node)) {
                if (config.installInHw() && !config.getStatus().equals(SUCCESS)) {
                    Status status = this.installFlowEntryAsync(config.getFlowEntry());
                    config.setStatus(status.getDescription());
                }
            }
        }
        // Update cluster cache
        refreshClusterStaticFlowsStatus(node);
    }

    // Marks user static flows as NODEDOWN and drops the controller-generated
    // (internal) ones for a node that disappeared.
    private void updateStaticFlowConfigsOnNodeDown(Node node) {
        log.trace("Updating Static Flow configs on node down: {}", node);
        List<Integer> toRemove = new ArrayList<Integer>();
        for (Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            FlowConfig config = entry.getValue();
            if (config.isPortGroupEnabled()) {
                continue;
            }
            if (config.installInHw() && config.getNode().equals(node)) {
                if (config.isInternalFlow()) {
                    // Take note of this controller generated static flow
                    toRemove.add(entry.getKey());
                } else {
                    config.setStatus(NODEDOWN);
                }
            }
        }
        // Remove controller generated static flows for this node
        for (Integer index : toRemove) {
            staticFlows.remove(index);
        }
        // Update cluster cache
        refreshClusterStaticFlowsStatus(node);
    }

    // Adjusts the status of non-internal static flows when container mode is
    // entered (ADDED) or left (REMOVED).
    private void updateStaticFlowConfigsOnContainerModeChange(UpdateType update) {
        log.trace("Updating Static Flow configs on container mode change: {}", update);
        for
(ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            FlowConfig config = entry.getValue();
            if (config.isPortGroupEnabled()) {
                continue;
            }
            if (config.installInHw() && !config.isInternalFlow()) {
                switch (update) {
                case ADDED:
                    config.setStatus("Removed from node because in container mode");
                    break;
                case REMOVED:
                    config.setStatus(SUCCESS);
                    break;
                default:
                }
            }
        }
        // Update cluster cache
        refreshClusterStaticFlowsStatus(null);
    }

    // Removes a static flow located by name+node equality with the given config.
    @Override
    public Status removeStaticFlow(FlowConfig config) {
        /*
         * No config.isInternal() check as NB does not take this path and GUI
         * cannot issue a delete on an internal generated flow. We need this
         * path to be accessible when switch mode is changed from proactive to
         * reactive, so that we can remove the internal generated LLDP and ARP
         * punt flows
         */
        // Look for the target configuration entry
        Integer key = 0;
        FlowConfig target = null;
        for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            if (entry.getValue().isByNameAndNodeIdEqual(config)) {
                key = entry.getKey();
                target = entry.getValue();
                break;
            }
        }
        if (target == null) {
            return new Status(StatusCode.NOTFOUND, "Entry Not Present");
        }
        // Program the network node
        // NOTE(review): uninstalls config.getFlowEntry() rather than
        // target.getFlowEntry(); the String/Node overload below uses the
        // located target -- confirm the asymmetry is intended.
        Status status = this.uninstallFlowEntry(config.getFlowEntry());
        // Update configuration database if programming was successful
        if (status.isSuccess()) {
            staticFlows.remove(key);
        }
        return status;
    }

    // Removes a user static flow by name+node; refuses internal and
    // port-group flows (those have dedicated paths).
    @Override
    public Status removeStaticFlow(String name, Node node) {
        // Look for the target configuration entry
        Integer key = 0;
        FlowConfig target = null;
        for (ConcurrentMap.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) {
            if (mapEntry.getValue().isByNameAndNodeIdEqual(name, node)) {
                key = mapEntry.getKey();
                target = mapEntry.getValue();
                break;
            }
        }
        if (target == null) {
            return new Status(StatusCode.NOTFOUND, "Entry Not Present");
        }
        // Validity check for api3 entry point
        if (target.isInternalFlow()) {
            String msg = "Invalid operation: Controller generated flow cannot be deleted";
            String logMsg = msg + ": {}";
            log.warn(logMsg, name);
            return new Status(StatusCode.NOTACCEPTABLE, msg);
        }
        if (target.isPortGroupEnabled()) {
            String msg = "Invalid operation: Port Group flows cannot be deleted through this API";
            String logMsg = msg + ": {}";
            log.warn(logMsg, name);
            return new Status(StatusCode.NOTACCEPTABLE, msg);
        }
        // Program the network node
        Status status = this.removeEntry(target.getFlowEntry(), false);
        // Update configuration database if programming was successful
        if (status.isSuccess()) {
            staticFlows.remove(key);
        }
        return status;
    }

    // Replaces an existing static flow config with a new one, reprogramming
    // the node when the old config was installed.
    @Override
    public Status modifyStaticFlow(FlowConfig newFlowConfig) {
        // Validity check for api3 entry point
        if (newFlowConfig.isInternalFlow()) {
            String msg = "Invalid operation: Controller generated flow cannot be modified";
            String logMsg = msg + ": {}";
            log.warn(logMsg, newFlowConfig);
            return new Status(StatusCode.NOTACCEPTABLE, msg);
        }
        // Validity Check
        Status status = newFlowConfig.validate(container);
        if (!status.isSuccess()) {
            String msg = "Invalid Configuration (" + status.getDescription() + ")";
            newFlowConfig.setStatus(msg);
            log.warn("Invalid Configuration for flow {}. The failure is {}", newFlowConfig, status.getDescription());
            return new Status(StatusCode.BADREQUEST, msg);
        }
        FlowConfig oldFlowConfig = null;
        Integer index = null;
        for (ConcurrentMap.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) {
            FlowConfig entry = mapEntry.getValue();
            if (entry.isByNameAndNodeIdEqual(newFlowConfig.getName(), newFlowConfig.getNode())) {
                oldFlowConfig = entry;
                index = mapEntry.getKey();
                break;
            }
        }
        if (oldFlowConfig == null) {
            String msg = "Attempt to modify a non existing static flow";
            String logMsg = msg + ": {}";
            log.warn(logMsg, newFlowConfig);
            return new Status(StatusCode.NOTFOUND, msg);
        }
        // Do not attempt to reinstall the flow, warn user
        if (newFlowConfig.equals(oldFlowConfig)) {
            String msg = "No modification detected";
            log.info("Static flow modification skipped. New flow and old flow are the same: {}", newFlowConfig);
            return new Status(StatusCode.SUCCESS, msg);
        }
        // If flow is installed, program the network node
        status = new Status(StatusCode.SUCCESS, "Saved in config");
        if (oldFlowConfig.installInHw()) {
            status = this.modifyFlowEntry(oldFlowConfig.getFlowEntry(), newFlowConfig.getFlowEntry());
        }
        // Update configuration database if programming was successful
        if (status.isSuccess()) {
            newFlowConfig.setStatus(status.getDescription());
            staticFlows.put(index, newFlowConfig);
        }
        return status;
    }

    @Override
    public Status toggleStaticFlowStatus(String name, Node node) {
        return toggleStaticFlowStatus(getStaticFlow(name, node));
    }

    // Toggles a static flow between installed and uninstalled, keeping the
    // configuration database in sync on success.
    @Override
    public Status toggleStaticFlowStatus(FlowConfig config) {
        if (config == null) {
            String msg = "Invalid request: null flow config";
            log.warn(msg);
            return new Status(StatusCode.BADREQUEST, msg);
        }
        // Validity check for api3 entry point
        if (config.isInternalFlow()) {
            String msg = "Invalid operation: Controller generated flow cannot be modified";
            String logMsg = msg + ": {}";
            log.warn(logMsg, config);
            return new Status(StatusCode.NOTACCEPTABLE, msg);
        }
        // Find the config entry
        Integer key = 0;
        FlowConfig target = null;
        for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            FlowConfig conf = entry.getValue();
            if (conf.isByNameAndNodeIdEqual(config)) {
                key = entry.getKey();
                target = conf;
                break;
            }
        }
        if (target != null) {
            // Program the network node
            Status status = (target.installInHw()) ? this.uninstallFlowEntry(target.getFlowEntry()) : this
                    .installFlowEntry(target.getFlowEntry());
            if (status.isSuccess()) {
                // Update Configuration database
                target.setStatus(SUCCESS);
                target.toggleInstallation();
                staticFlows.put(key, target);
            }
            return status;
        }
        return new Status(StatusCode.NOTFOUND, "Unable to locate the entry. Failed to toggle status");
    }

    /**
     * Reinsert all static flows entries in the cache to force cache updates in
     * the cluster.
This is useful when only some parameters were changed in the
     * entries, like the status.
     *
     * @param node
     *            The node for which the static flow configurations have to be
     *            refreshed. If null, all nodes static flows will be refreshed.
     */
    private void refreshClusterStaticFlowsStatus(Node node) {
        // Refresh cluster cache
        for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            if (node == null || entry.getValue().getNode().equals(node)) {
                // Re-put the same value to propagate the mutation cluster-wide
                staticFlows.put(entry.getKey(), entry.getValue());
            }
        }
    }

    /**
     * Uninstall all the non-internal Flow Entries present in the software view.
     * If requested, a copy of each original flow entry will be stored in the
     * inactive list so that it can be re-applied when needed (This is typically
     * the case when running in the default container and controller moved to
     * container mode)
     *
     * @param preserveFlowEntries
     *            if true, a copy of each original entry is stored in the
     *            inactive list
     */
    private void uninstallAllFlowEntries(boolean preserveFlowEntries) {
        log.info("Uninstalling all non-internal flows");
        List<FlowEntryInstall> toRemove = new ArrayList<FlowEntryInstall>();
        // Store entries / create target list
        for (ConcurrentMap.Entry<FlowEntryInstall, FlowEntryInstall> mapEntry : installedSwView.entrySet()) {
            FlowEntryInstall flowEntries = mapEntry.getValue();
            // Skip internal generated static flows
            if (!flowEntries.isInternal()) {
                toRemove.add(flowEntries);
                // Store the original entries if requested
                if (preserveFlowEntries) {
                    inactiveFlows.put(flowEntries.getOriginal(), flowEntries.getOriginal());
                }
            }
        }
        // Now remove the entries
        for (FlowEntryInstall flowEntryHw : toRemove) {
            Status status = this.removeEntryInternal(flowEntryHw, false);
            if (!status.isSuccess()) {
                log.warn("Failed to remove entry: {}. The failure is: {}", flowEntryHw, status.getDescription());
            }
        }
    }

    /**
     * Re-install all the Flow Entries present in the inactive list The inactive
     * list will be empty at the end of this call This function is called on the
     * default container instance of FRM only when the last container is deleted
     */
    private void reinstallAllFlowEntries() {
        log.info("Reinstalling all inactive flows");
        for (FlowEntry flowEntry : this.inactiveFlows.keySet()) {
            this.addEntry(flowEntry, false);
        }
        // Empty inactive list in any case
        inactiveFlows.clear();
    }

    // Returns the static flow configs in insertion order.
    // NOTE(review): assumes staticFlowsOrdinal has key 0 -- NPE otherwise;
    // confirm the initialization path seeds it.
    @Override
    public List<FlowConfig> getStaticFlows() {
        return getStaticFlowsOrderedList(staticFlows, staticFlowsOrdinal.get(0).intValue());
    }

    // TODO: need to come out with a better algorithm for maintaining the order
    // of the configuration entries
    // with actual one, index associated to deleted entries cannot be reused and
    // map grows...
    private List<FlowConfig> getStaticFlowsOrderedList(ConcurrentMap<Integer, FlowConfig> flowMap, int maxKey) {
        List<FlowConfig> orderedList = new ArrayList<FlowConfig>();
        for (int i = 0; i <= maxKey; i++) {
            FlowConfig entry = flowMap.get(i);
            if (entry != null) {
                orderedList.add(entry);
            }
        }
        return orderedList;
    }

    // Looks up a static flow config by name+node; null when absent.
    @Override
    public FlowConfig getStaticFlow(String name, Node node) {
        for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            if (entry.getValue().isByNameAndNodeIdEqual(name, node)) {
                return entry.getValue();
            }
        }
        return null;
    }

    @Override
    public List<FlowConfig> getStaticFlows(Node node) {
        List<FlowConfig> list = new ArrayList<FlowConfig>();
        for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            if (entry.getValue().onNode(node)) {
                list.add(entry.getValue());
            }
        }
        return list;
    }

    @Override
    public List<String> getStaticFlowNamesForNode(Node node) {
        List<String> list = new ArrayList<String>();
        for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            if (entry.getValue().onNode(node)) {
                list.add(entry.getValue().getName());
            }
        }
        return list;
    }

    // Distinct nodes that have at least one static flow configured.
    @Override
    public List<Node> getListNodeWithConfiguredFlows() {
        Set<Node> set = new HashSet<Node>();
        for (ConcurrentMap.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
            set.add(entry.getValue().getNode());
        }
        return new ArrayList<Node>(set);
    }

    // Loads static flow and port group configuration from persistent storage
    // and replays it through the internal add paths (restore mode).
    @SuppressWarnings("unchecked")
    private void loadFlowConfiguration() {
        ObjectReader objReader = new ObjectReader();
        ConcurrentMap<Integer, FlowConfig> confList = (ConcurrentMap<Integer, FlowConfig>) objReader.read(this,
                frmFileName);
        ConcurrentMap<String, PortGroupConfig> pgConfig = (ConcurrentMap<String, PortGroupConfig>) objReader.read(this,
                portGroupFileName);
        if (pgConfig != null) {
            for (ConcurrentMap.Entry<String, PortGroupConfig> entry : pgConfig.entrySet()) {
                addPortGroupConfig(entry.getKey(), entry.getValue().getMatchString(), true);
            }
        }
        if (confList == null) {
            return;
        }
        int maxKey = 0;
        for (Integer key : confList.keySet()) {
            if (key.intValue() > maxKey) {
                maxKey = key.intValue();
            }
        }
        for (FlowConfig conf : getStaticFlowsOrderedList(confList, maxKey)) {
            addStaticFlowInternal(conf, true);
        }
    }

    @Override
    public Object readObject(ObjectInputStream ois) throws FileNotFoundException, IOException, ClassNotFoundException {
        return ois.readObject();
    }

    @Override
    public Status saveConfig() {
        return saveConfigInternal();
    }

    // Persists user static flows (skipping dynamic and internal ones) and the
    // port group configurations.
    private Status saveConfigInternal() {
        ObjectWriter objWriter = new ObjectWriter();
        ConcurrentMap<Integer, FlowConfig> nonDynamicFlows = new ConcurrentHashMap<Integer, FlowConfig>();
        for (Integer ordinal : staticFlows.keySet()) {
            FlowConfig config = staticFlows.get(ordinal);
            // Do not save dynamic and controller generated static flows
            if (config.isDynamic() || config.isInternalFlow()) {
                continue;
            }
            nonDynamicFlows.put(ordinal, config);
        }
        objWriter.write(nonDynamicFlows, frmFileName);
        objWriter.write(new ConcurrentHashMap<String, PortGroupConfig>(portGroupConfigs), portGroupFileName);
        return new Status(StatusCode.SUCCESS, null);
    }

    @Override
    public void subnetNotify(Subnet sub, boolean add)
{ /* no-op */ }

    // Installs the internal static flow that punts ARP replies addressed to
    // the controller MAC to the controller (priority 500).
    private void installImplicitARPReplyPunt(Node node) {
        if (node == null) {
            return;
        }
        List<String> puntAction = new ArrayList<String>();
        puntAction.add(ActionType.CONTROLLER.toString());
        FlowConfig allowARP = new FlowConfig();
        allowARP.setInstallInHw(true);
        allowARP.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Punt ARP Reply" + FlowConfig.INTERNALSTATICFLOWEND);
        allowARP.setPriority("500");
        allowARP.setNode(node);
        allowARP.setEtherType("0x" + Integer.toHexString(EtherTypes.ARP.intValue()).toUpperCase());
        allowARP.setDstMac(HexEncode.bytesToHexString(switchManager.getControllerMAC()));
        allowARP.setActions(puntAction);
        addStaticFlowInternal(allowARP, true); // skip validation on internal static flow name
    }

    // When a node switches to proactive mode, installs the internal ARP punt,
    // LLDP punt and catch-all drop flows; removes them when it goes reactive.
    @Override
    public void modeChangeNotify(Node node, boolean proactive) {
        List<FlowConfig> defaultConfigs = new ArrayList<FlowConfig>();

        List<String> puntAction = new ArrayList<String>();
        puntAction.add(ActionType.CONTROLLER.toString());

        FlowConfig allowARP = new FlowConfig();
        allowARP.setInstallInHw(true);
        allowARP.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Punt ARP" + FlowConfig.INTERNALSTATICFLOWEND);
        allowARP.setPriority("1");
        allowARP.setNode(node);
        allowARP.setEtherType("0x" + Integer.toHexString(EtherTypes.ARP.intValue()).toUpperCase());
        allowARP.setActions(puntAction);
        defaultConfigs.add(allowARP);

        FlowConfig allowLLDP = new FlowConfig();
        allowLLDP.setInstallInHw(true);
        allowLLDP.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Punt LLDP" + FlowConfig.INTERNALSTATICFLOWEND);
        allowLLDP.setPriority("1");
        allowLLDP.setNode(node);
        allowLLDP.setEtherType("0x" + Integer.toHexString(EtherTypes.LLDP.intValue()).toUpperCase());
        allowLLDP.setActions(puntAction);
        defaultConfigs.add(allowLLDP);

        List<String> dropAction = new ArrayList<String>();
        dropAction.add(ActionType.DROP.toString());

        FlowConfig dropAllConfig = new FlowConfig();
        dropAllConfig.setInstallInHw(true);
        dropAllConfig.setName(FlowConfig.INTERNALSTATICFLOWBEGIN + "Catch-All Drop" + FlowConfig.INTERNALSTATICFLOWEND);
        dropAllConfig.setPriority("0");
        dropAllConfig.setNode(node);
        dropAllConfig.setActions(dropAction);
        defaultConfigs.add(dropAllConfig);

        log.info("Forwarding mode for node {} set to {}", node, (proactive ? "proactive" : "reactive"));
        for (FlowConfig fc : defaultConfigs) {
            Status status = (proactive) ? addStaticFlowInternal(fc, false) : removeStaticFlow(fc);
            if (status.isSuccess()) {
                log.info("{} Proactive Static flow: {}", (proactive ? "Installed" : "Removed"), fc.getName());
            } else {
                log.warn("Failed to {} Proactive Static flow: {}", (proactive ? "install" : "remove"), fc.getName());
            }
        }
    }

    /**
     * Remove from the databases all the flows installed on the node
     *
     * @param node
     */
    private void cleanDatabaseForNode(Node node) {
        log.info("Cleaning Flow database for Node {}", node);
        if (nodeFlows.containsKey(node)) {
            // Copy first: updateLocalDatabase modifies the per-node list
            List<FlowEntryInstall> toRemove = new ArrayList<FlowEntryInstall>(nodeFlows.get(node));
            for (FlowEntryInstall entry : toRemove) {
                updateLocalDatabase(entry, false);
            }
        }
    }

    // True when the flow matches on the connector (IN_PORT) or outputs to it.
    private boolean doesFlowContainNodeConnector(Flow flow, NodeConnector nc) {
        if (nc == null) {
            return false;
        }
        Match match = flow.getMatch();
        if (match.isPresent(MatchType.IN_PORT)) {
            NodeConnector matchPort = (NodeConnector) match.getField(MatchType.IN_PORT).getValue();
            if (matchPort.equals(nc)) {
                return true;
            }
        }
        List<Action> actionsList = flow.getActions();
        if (actionsList != null) {
            for (Action action : actionsList) {
                if (action instanceof Output) {
                    NodeConnector actionPort = ((Output) action).getPort();
                    if (actionPort.equals(nc)) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    // Queues node updates for asynchronous processing by the event handler.
    @Override
    public void notifyNode(Node node, UpdateType type, Map<String, Property> propMap) {
        this.pendingEvents.offer(new NodeUpdateEvent(type, node));
    }

    @Override
    public void notifyNodeConnector(NodeConnector nodeConnector, UpdateType type, Map<String, Property> propMap) {
        /* no-op */
    }

    private FlowConfig getDerivedFlowConfig(FlowConfig original, String configName, Short port) { FlowConfig derivedFlow =
new FlowConfig(original); derivedFlow.setDynamic(true); derivedFlow.setPortGroup(null); derivedFlow.setName(original.getName() + "_" + configName + "_" + port); derivedFlow.setIngressPort(port + ""); return derivedFlow; } private void addPortGroupFlows(PortGroupConfig config, Node node, PortGroup data) { for (FlowConfig staticFlow : staticFlows.values()) { if (staticFlow.getPortGroup() == null) { continue; } if ((staticFlow.getNode().equals(node)) && (staticFlow.getPortGroup().equals(config.getName()))) { for (Short port : data.getPorts()) { FlowConfig derivedFlow = getDerivedFlowConfig(staticFlow, config.getName(), port); addStaticFlowInternal(derivedFlow, false); } } } } private void removePortGroupFlows(PortGroupConfig config, Node node, PortGroup data) { for (FlowConfig staticFlow : staticFlows.values()) { if (staticFlow.getPortGroup() == null) { continue; } if (staticFlow.getNode().equals(node) && staticFlow.getPortGroup().equals(config.getName())) { for (Short port : data.getPorts()) { FlowConfig derivedFlow = getDerivedFlowConfig(staticFlow, config.getName(), port); removeStaticFlow(derivedFlow); } } } } @Override public void portGroupChanged(PortGroupConfig config, Map<Node, PortGroup> data, boolean add) { log.info("PortGroup Changed for: {} Data: {}", config, portGroupData); Map<Node, PortGroup> existingData = portGroupData.get(config); if (existingData != null) { for (Map.Entry<Node, PortGroup> entry : data.entrySet()) { PortGroup existingPortGroup = existingData.get(entry.getKey()); if (existingPortGroup == null) { if (add) { existingData.put(entry.getKey(), entry.getValue()); addPortGroupFlows(config, entry.getKey(), entry.getValue()); } } else { if (add) { existingPortGroup.getPorts().addAll(entry.getValue().getPorts()); addPortGroupFlows(config, entry.getKey(), entry.getValue()); } else { existingPortGroup.getPorts().removeAll(entry.getValue().getPorts()); removePortGroupFlows(config, entry.getKey(), entry.getValue()); } } } } else { if (add) { 
portGroupData.put(config, data); for (Node swid : data.keySet()) { addPortGroupFlows(config, swid, data.get(swid)); } } } } @Override public boolean addPortGroupConfig(String name, String regex, boolean restore) { PortGroupConfig config = portGroupConfigs.get(name); if (config != null) { return false; } if ((portGroupProvider == null) && !restore) { return false; } if ((portGroupProvider != null) && (!portGroupProvider.isMatchCriteriaSupported(regex))) { return false; } config = new PortGroupConfig(name, regex); portGroupConfigs.put(name, config); if (portGroupProvider != null) { portGroupProvider.createPortGroupConfig(config); } return true; } @Override public boolean delPortGroupConfig(String name) { PortGroupConfig config = portGroupConfigs.get(name); if (config == null) { return false; } if (portGroupProvider != null) { portGroupProvider.deletePortGroupConfig(config); } portGroupConfigs.remove(name); return true; } private void usePortGroupConfig(String name) { PortGroupConfig config = portGroupConfigs.get(name); if (config == null) { return; } if (portGroupProvider != null) { Map<Node, PortGroup> data = portGroupProvider.getPortGroupData(config); portGroupData.put(config, data); } } @Override public Map<String, PortGroupConfig> getPortGroupConfigs() { return portGroupConfigs; } public boolean isPortGroupSupported() { if (portGroupProvider == null) { return false; } return true; } public void setIContainer(IContainer s) { this.container = s; } public void unsetIContainer(IContainer s) { if (this.container == s) { this.container = null; } } @Override public PortGroupProvider getPortGroupProvider() { return portGroupProvider; } public void unsetPortGroupProvider(PortGroupProvider portGroupProvider) { this.portGroupProvider = null; } public void setPortGroupProvider(PortGroupProvider portGroupProvider) { this.portGroupProvider = portGroupProvider; portGroupProvider.registerPortGroupChange(this); for (PortGroupConfig config : portGroupConfigs.values()) { 
portGroupProvider.createPortGroupConfig(config); } } public void setFrmAware(IForwardingRulesManagerAware obj) { this.frmAware.add(obj); } public void unsetFrmAware(IForwardingRulesManagerAware obj) { this.frmAware.remove(obj); } void setClusterContainerService(IClusterContainerServices s) { log.debug("Cluster Service set"); this.clusterContainerService = s; } void unsetClusterContainerService(IClusterContainerServices s) { if (this.clusterContainerService == s) { log.debug("Cluster Service removed!"); this.clusterContainerService = null; } } private String getContainerName() { if (container == null) { return GlobalConstants.DEFAULT.toString(); } return container.getName(); } /** * Function called by the dependency manager when all the required * dependencies are satisfied * */ void init() { frmFileName = GlobalConstants.STARTUPHOME.toString() + "frm_staticflows_" + this.getContainerName() + ".conf"; portGroupFileName = GlobalConstants.STARTUPHOME.toString() + "portgroup_" + this.getContainerName() + ".conf"; inContainerMode = false; if (portGroupProvider != null) { portGroupProvider.registerPortGroupChange(this); } cacheStartup(); registerWithOSGIConsole(); /* * If we are not the first cluster node to come up, do not initialize * the static flow entries ordinal */ if (staticFlowsOrdinal.size() == 0) { staticFlowsOrdinal.put(0, Integer.valueOf(0)); } pendingEvents = new LinkedBlockingQueue<FRMEvent>(); // Initialize the event handler thread frmEventHandler = new Thread(new Runnable() { @Override public void run() { while (!stopping) { try { FRMEvent event = pendingEvents.take(); if (event == null) { log.warn("Dequeued null event"); continue; } if (event instanceof NodeUpdateEvent) { NodeUpdateEvent update = (NodeUpdateEvent) event; Node node = update.getNode(); switch (update.getUpdateType()) { case ADDED: addStaticFlowsToSwitch(node); break; case REMOVED: cleanDatabaseForNode(node); updateStaticFlowConfigsOnNodeDown(node); break; default: } } else if (event 
instanceof ErrorReportedEvent) { ErrorReportedEvent errEvent = (ErrorReportedEvent) event; processErrorEvent(errEvent); } else if (event instanceof WorkOrderEvent) { /* * Take care of handling the remote Work request */ WorkOrderEvent work = (WorkOrderEvent) event; FlowEntryDistributionOrder fe = work.getFe(); if (fe != null) { logsync.trace("Executing the workOrder {}", fe); Status gotStatus = null; FlowEntryInstall feiCurrent = fe.getEntry(); FlowEntryInstall feiNew = workOrder.get(fe.getEntry()); switch (fe.getUpType()) { case ADDED: /* * TODO: Not still sure how to handle the * sync entries */ gotStatus = addEntriesInternal(feiCurrent, true); break; case CHANGED: gotStatus = modifyEntryInternal(feiCurrent, feiNew, true); break; case REMOVED: gotStatus = removeEntryInternal(feiCurrent, true); break; } // Remove the Order workOrder.remove(fe); logsync.trace( "The workOrder has been executed and now the status is being returned {}", fe); // Place the status workStatus.put(fe, gotStatus); } else { log.warn("Not expected null WorkOrder", work); } } else if (event instanceof WorkStatusCleanup) { /* * Take care of handling the remote Work request */ WorkStatusCleanup work = (WorkStatusCleanup) event; FlowEntryDistributionOrder fe = work.getFe(); if (fe != null) { logsync.trace("The workStatus {} is being removed", fe); workStatus.remove(fe); } else { log.warn("Not expected null WorkStatus", work); } } else { log.warn("Dequeued unknown event {}", event.getClass() .getSimpleName()); } } catch (InterruptedException e) { // clear pending events pendingEvents.clear(); } } } }, "FRM EventHandler Collector"); } /** * Function called by the dependency manager when at least one dependency * become unsatisfied or when the component is shutting down because for * example bundle is being stopped. 
* */ void destroy() { // Interrupt the thread frmEventHandler.interrupt(); // Clear the pendingEvents queue pendingEvents.clear(); frmAware.clear(); workMonitor.clear(); } /** * Function called by dependency manager after "init ()" is called and after * the services provided by the class are registered in the service registry * */ void start() { // Initialize graceful stop flag stopping = false; // Start event handler thread frmEventHandler.start(); /* * Read startup and build database if we have not already gotten the * configurations synced from another node */ if (staticFlows.isEmpty()) { loadFlowConfiguration(); } // Allocate the executor service this.executor = Executors.newSingleThreadExecutor(); } /** * Function called by the dependency manager before the services exported by * the component are unregistered, this will be followed by a "destroy ()" * calls */ void stop() { stopping = true; uninstallAllFlowEntries(false); // Shutdown executor this.executor.shutdownNow(); } public void setFlowProgrammerService(IFlowProgrammerService service) { this.programmer = service; } public void unsetFlowProgrammerService(IFlowProgrammerService service) { if (this.programmer == service) { this.programmer = null; } } public void setSwitchManager(ISwitchManager switchManager) { this.switchManager = switchManager; } public void unsetSwitchManager(ISwitchManager switchManager) { if (this.switchManager == switchManager) { this.switchManager = null; } } @Override public void tagUpdated(String containerName, Node n, short oldTag, short newTag, UpdateType t) { if (!container.getName().equals(containerName)) { return; } } @Override public void containerFlowUpdated(String containerName, ContainerFlow previous, ContainerFlow current, UpdateType t) { if (!container.getName().equals(containerName)) { return; } log.trace("Container {}: Updating installed flows because of container flow change: {} {}", container.getName(), t, current); /* * Whether it is an addition or removal, we have 
to recompute the merged * flows entries taking into account all the current container flows * because flow merging is not an injective function */ updateFlowsContainerFlow(); } @Override public void nodeConnectorUpdated(String containerName, NodeConnector nc, UpdateType t) { if (!container.getName().equals(containerName)) { return; } boolean updateStaticFlowCluster = false; switch (t) { case REMOVED: List<FlowEntryInstall> nodeFlowEntries = nodeFlows.get(nc.getNode()); if (nodeFlowEntries == null) { return; } for (FlowEntryInstall fei : new ArrayList<FlowEntryInstall>(nodeFlowEntries)) { if (doesFlowContainNodeConnector(fei.getInstall().getFlow(), nc)) { Status status = this.removeEntryInternal(fei, true); if (!status.isSuccess()) { continue; } /* * If the flow entry is a static flow, then update its * configuration */ if (fei.getGroupName().equals(FlowConfig.STATICFLOWGROUP)) { FlowConfig flowConfig = getStaticFlow(fei.getFlowName(), fei.getNode()); if (flowConfig != null) { flowConfig.setStatus(PORTREMOVED); updateStaticFlowCluster = true; } } } } if (updateStaticFlowCluster) { refreshClusterStaticFlowsStatus(nc.getNode()); } break; case ADDED: List<FlowConfig> flowConfigForNode = getStaticFlows(nc.getNode()); for (FlowConfig flowConfig : flowConfigForNode) { if (doesFlowContainNodeConnector(flowConfig.getFlow(), nc)) { if (flowConfig.installInHw()) { Status status = this.installFlowEntry(flowConfig.getFlowEntry()); if (!status.isSuccess()) { flowConfig.setStatus(status.getDescription()); } else { flowConfig.setStatus(SUCCESS); } updateStaticFlowCluster = true; } } } if (updateStaticFlowCluster) { refreshClusterStaticFlowsStatus(nc.getNode()); } break; case CHANGED: break; default: } } @Override public void containerModeUpdated(UpdateType update) { // Only default container instance reacts on this event if (!container.getName().equals(GlobalConstants.DEFAULT.toString())) { return; } switch (update) { case ADDED: /* * Controller is moving to container mode. 
We are in the default * container context, we need to remove all our non-internal flows * to prevent any container isolation breakage. We also need to * preserve our flow so that they can be re-installed if we move * back to non container mode (no containers). */ this.inContainerMode = true; this.uninstallAllFlowEntries(true); break; case REMOVED: this.inContainerMode = false; this.reinstallAllFlowEntries(); break; default: } // Update our configuration DB updateStaticFlowConfigsOnContainerModeChange(update); } protected abstract class FRMEvent { } private class NodeUpdateEvent extends FRMEvent { private final Node node; private final UpdateType update; public NodeUpdateEvent(UpdateType update, Node node) { this.update = update; this.node = node; } public UpdateType getUpdateType() { return update; } public Node getNode() { return node; } } private class ErrorReportedEvent extends FRMEvent { private final long rid; private final Node node; private final Object error; public ErrorReportedEvent(long rid, Node node, Object error) { this.rid = rid; this.node = node; this.error = error; } public long getRequestId() { return rid; } public Object getError() { return error; } public Node getNode() { return node; } } private class WorkOrderEvent extends FRMEvent { private FlowEntryDistributionOrder fe; private FlowEntryInstall newEntry; /** * @param fe * @param newEntry */ WorkOrderEvent(FlowEntryDistributionOrder fe, FlowEntryInstall newEntry) { this.fe = fe; this.newEntry = newEntry; } /** * @return the fe */ public FlowEntryDistributionOrder getFe() { return fe; } /** * @return the newEntry */ public FlowEntryInstall getNewEntry() { return newEntry; } } private class WorkStatusCleanup extends FRMEvent { private FlowEntryDistributionOrder fe; /** * @param fe */ WorkStatusCleanup(FlowEntryDistributionOrder fe) { this.fe = fe; } /** * @return the fe */ public FlowEntryDistributionOrder getFe() { return fe; } } /* * OSGI COMMANDS */ @Override public String getHelp() { 
StringBuffer help = new StringBuffer(); help.append("---FRM Matrix Application---\n"); help.append("\t printMatrixData - Prints the Matrix Configs\n"); help.append("\t addMatrixConfig <name> <regex>\n"); help.append("\t delMatrixConfig <name>\n"); help.append("\t useMatrixConfig <name>\n"); return help.toString(); } public void _printMatrixData(CommandInterpreter ci) { ci.println("Configs : "); ci.println("---------"); ci.println(portGroupConfigs); ci.println("Data : "); ci.println("------"); ci.println(portGroupData); } public void _addMatrixConfig(CommandInterpreter ci) { String name = ci.nextArgument(); String regex = ci.nextArgument(); addPortGroupConfig(name, regex, false); } public void _delMatrixConfig(CommandInterpreter ci) { String name = ci.nextArgument(); delPortGroupConfig(name); } public void _useMatrixConfig(CommandInterpreter ci) { String name = ci.nextArgument(); usePortGroupConfig(name); } public void _arpPunt(CommandInterpreter ci) { String switchId = ci.nextArgument(); long swid = HexEncode.stringToLong(switchId); Node node = NodeCreator.createOFNode(swid); installImplicitARPReplyPunt(node); } public void _frmaddflow(CommandInterpreter ci) throws UnknownHostException { Node node = null; String nodeId = ci.nextArgument(); if (nodeId == null) { ci.print("Node id not specified"); return; } try { node = NodeCreator.createOFNode(Long.valueOf(nodeId)); } catch (NumberFormatException e) { ci.print("Node id not a number"); return; } ci.println(this.programmer.addFlow(node, getSampleFlow(node))); } public void _frmremoveflow(CommandInterpreter ci) throws UnknownHostException { Node node = null; String nodeId = ci.nextArgument(); if (nodeId == null) { ci.print("Node id not specified"); return; } try { node = NodeCreator.createOFNode(Long.valueOf(nodeId)); } catch (NumberFormatException e) { ci.print("Node id not a number"); return; } ci.println(this.programmer.removeFlow(node, getSampleFlow(node))); } private Flow getSampleFlow(Node node) throws 
UnknownHostException { NodeConnector port = NodeConnectorCreator.createOFNodeConnector((short) 24, node); NodeConnector oport = NodeConnectorCreator.createOFNodeConnector((short) 30, node); byte srcMac[] = { (byte) 0x12, (byte) 0x34, (byte) 0x56, (byte) 0x78, (byte) 0x9a, (byte) 0xbc }; byte dstMac[] = { (byte) 0x1a, (byte) 0x2b, (byte) 0x3c, (byte) 0x4d, (byte) 0x5e, (byte) 0x6f }; InetAddress srcIP = InetAddress.getByName("172.28.30.50"); InetAddress dstIP = InetAddress.getByName("171.71.9.52"); InetAddress ipMask = InetAddress.getByName("255.255.255.0"); InetAddress ipMask2 = InetAddress.getByName("255.0.0.0"); short ethertype = EtherTypes.IPv4.shortValue(); short vlan = (short) 27; byte vlanPr = 3; Byte tos = 4; byte proto = IPProtocols.TCP.byteValue(); short src = (short) 55000; short dst = 80; /* * Create a SAL Flow aFlow */ Match match = new Match(); match.setField(MatchType.IN_PORT, port); match.setField(MatchType.DL_SRC, srcMac); match.setField(MatchType.DL_DST, dstMac); match.setField(MatchType.DL_TYPE, ethertype); match.setField(MatchType.DL_VLAN, vlan); match.setField(MatchType.DL_VLAN_PR, vlanPr); match.setField(MatchType.NW_SRC, srcIP, ipMask); match.setField(MatchType.NW_DST, dstIP, ipMask2); match.setField(MatchType.NW_TOS, tos); match.setField(MatchType.NW_PROTO, proto); match.setField(MatchType.TP_SRC, src); match.setField(MatchType.TP_DST, dst); List<Action> actions = new ArrayList<Action>(); actions.add(new Output(oport)); actions.add(new PopVlan()); actions.add(new Flood()); actions.add(new Controller()); return new Flow(match, actions); } @Override public Status saveConfiguration() { return saveConfig(); } public void _frmNodeFlows(CommandInterpreter ci) { String nodeId = ci.nextArgument(); Node node = Node.fromString(nodeId); if (node == null) { ci.println("frmNodeFlows <node> [verbose]"); return; } boolean verbose = false; String verboseCheck = ci.nextArgument(); if (verboseCheck != null) { verbose = verboseCheck.equals("true"); } if 
(!nodeFlows.containsKey(node)) { return; } // Dump per node database for (FlowEntryInstall entry : nodeFlows.get(node)) { if (!verbose) { ci.println(node + " " + installedSwView.get(entry).getFlowName()); } else { ci.println(node + " " + installedSwView.get(entry).toString()); } } } public void _frmGroupFlows(CommandInterpreter ci) { String group = ci.nextArgument(); if (group == null) { ci.println("frmGroupFlows <group> [verbose]"); return; } boolean verbose = false; String verboseCheck = ci.nextArgument(); if (verboseCheck != null) { verbose = verboseCheck.equalsIgnoreCase("true"); } if (!groupFlows.containsKey(group)) { return; } // Dump per node database ci.println("Group " + group + ":\n"); for (FlowEntryInstall flowEntry : groupFlows.get(group)) { if (!verbose) { ci.println(flowEntry.getNode() + " " + flowEntry.getFlowName()); } else { ci.println(flowEntry.getNode() + " " + flowEntry.toString()); } } } @Override public void flowRemoved(Node node, Flow flow) { log.trace("Received flow removed notification on {} for {}", node, flow); // For flow entry identification, only node, match and priority matter FlowEntryInstall test = new FlowEntryInstall(new FlowEntry("", "", flow, node), null); FlowEntryInstall installedEntry = this.installedSwView.get(test); if (installedEntry == null) { log.trace("Entry is not known to us"); return; } // Update Static flow status Integer key = 0; FlowConfig target = null; for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) { FlowConfig conf = entry.getValue(); if (conf.isByNameAndNodeIdEqual(installedEntry.getFlowName(), node)) { key = entry.getKey(); target = conf; break; } } if (target != null) { // Update Configuration database target.toggleInstallation(); target.setStatus(SUCCESS); staticFlows.put(key, target); } // Update software views this.updateLocalDatabase(installedEntry, false); } @Override public void flowErrorReported(Node node, long rid, Object err) { log.trace("Got error {} for message rid {} from 
node {}", new Object[] { err, rid, node }); pendingEvents.offer(new ErrorReportedEvent(rid, node, err)); } private void processErrorEvent(ErrorReportedEvent event) { Node node = event.getNode(); long rid = event.getRequestId(); Object error = event.getError(); String errorString = (error == null) ? "Not provided" : error.toString(); /* * If this was for a flow install, remove the corresponding entry from * the software view. If it was a Looking for the rid going through the * software database. TODO: A more efficient rid <-> FlowEntryInstall * mapping will have to be added in future */ FlowEntryInstall target = null; for (FlowEntryInstall index : nodeFlows.get(node)) { FlowEntryInstall entry = installedSwView.get(index); if (entry.getRequestId() == rid) { target = entry; break; } } if (target != null) { // This was a flow install, update database this.updateLocalDatabase(target, false); } // Notify listeners if (frmAware != null) { synchronized (frmAware) { for (IForwardingRulesManagerAware frma : frmAware) { try { frma.requestFailed(rid, errorString); } catch (Exception e) { log.warn("Failed to notify {}", frma); } } } } } @Override public Status solicitStatusResponse(Node node, boolean blocking) { Status rv = new Status(StatusCode.INTERNALERROR); if (this.programmer != null) { if (blocking) { rv = programmer.syncSendBarrierMessage(node); } else { rv = programmer.asyncSendBarrierMessage(node); } } return rv; } public void unsetIConnectionManager(IConnectionManager s) { if (s == this.connectionManager) { this.connectionManager = null; } } public void setIConnectionManager(IConnectionManager s) { this.connectionManager = s; } @Override public void entryCreated(Object key, String cacheName, boolean originLocal) { /* * Do nothing */ } @Override public void entryUpdated(Object key, Object new_value, String cacheName, boolean originLocal) { if (originLocal) { /* * Local updates are of no interest */ return; } if (cacheName.equals(WORKORDERCACHE)) { logsync.trace("Got a 
WorkOrderCacheUpdate for {}", key); /* * This is the case of one workOrder becoming available, so we need * to dispatch the work to the appropriate handler */ FlowEntryDistributionOrder fe = (FlowEntryDistributionOrder) key; FlowEntryInstall fei = fe.getEntry(); if (fei == null) { return; } Node n = fei.getNode(); if (connectionManager.isLocal(n)) { logsync.trace("workOrder for fe {} processed locally", fe); // I'm the controller in charge for the request, queue it for // processing pendingEvents.offer(new WorkOrderEvent(fe, (FlowEntryInstall) new_value)); } } else if (cacheName.equals(WORKSTATUSCACHE)) { logsync.trace("Got a WorkStatusCacheUpdate for {}", key); /* * This is the case of one workOrder being completed and a status * returned */ FlowEntryDistributionOrder fe = (FlowEntryDistributionOrder) key; /* * Check if the order was initiated by this controller in that case * we need to actually look at the status returned */ if (fe.getRequestorController() .equals(clusterContainerService.getMyAddress())) { FlowEntryDistributionOrderFutureTask fet = workMonitor.get(fe); if (fet != null) { logsync.trace("workStatus response is for us {}", fe); // Signal we got the status fet.gotStatus(fe, workStatus.get(fe)); pendingEvents.offer(new WorkStatusCleanup(fe)); } } } } @Override public void entryDeleted(Object key, String cacheName, boolean originLocal) { /* * Do nothing */ } }
Addressing gerrit 1182 Madhu's comment - Making sure the executor service is available before the loadconfig gets called, just in case of some race condition Change-Id: Ib1d84c33536a68806d281d23855a6c0a5be2dd23 Signed-off-by: Giovanni Meo <[email protected]>
opendaylight/forwardingrulesmanager/implementation/src/main/java/org/opendaylight/controller/forwardingrulesmanager/internal/ForwardingRulesManager.java
Addressing gerrit 1182 Madhu's comment
<ide><path>pendaylight/forwardingrulesmanager/implementation/src/main/java/org/opendaylight/controller/forwardingrulesmanager/internal/ForwardingRulesManager.java <ide> // Initialize graceful stop flag <ide> stopping = false; <ide> <add> // Allocate the executor service <add> this.executor = Executors.newSingleThreadExecutor(); <add> <ide> // Start event handler thread <ide> frmEventHandler.start(); <ide> <ide> if (staticFlows.isEmpty()) { <ide> loadFlowConfiguration(); <ide> } <del> <del> // Allocate the executor service <del> this.executor = Executors.newSingleThreadExecutor(); <ide> } <ide> <ide> /**
Java
bsd-3-clause
c058dbb0f7d00a172eb1fb54ad76287b347db9b1
0
rnathanday/dryad-repo,jimallman/dryad-repo,ojacobson/dryad-repo,jimallman/dryad-repo,ojacobson/dryad-repo,rnathanday/dryad-repo,ojacobson/dryad-repo,rnathanday/dryad-repo,ojacobson/dryad-repo,rnathanday/dryad-repo,ojacobson/dryad-repo,ojacobson/dryad-repo,rnathanday/dryad-repo,jimallman/dryad-repo,jimallman/dryad-repo,jimallman/dryad-repo,rnathanday/dryad-repo,jimallman/dryad-repo
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package org.dspace.doi; import java.sql.SQLException; import org.dspace.content.DCValue; import org.dspace.content.Item; import org.dspace.content.MetadataSchema; /** * Convenience methods involved in registering DOIs. * @author dan */ public class DryadDOIRegistrationHelper { public static final String REGISTER_PENDING_PUBLICATION_STEP = "registerPendingPublicationStep"; public static boolean isDataPackageInPublicationBlackout(Item dataPackage) throws SQLException { // Publication blackout is indicated by provenance metadata boolean isInBlackout = false; DCValue provenance[] = dataPackage.getMetadata(MetadataSchema.DC_SCHEMA, "description", "provenance", Item.ANY); for(DCValue dcValue : provenance) { // only return true if the last recorded provenance indicates publication blackout if(dcValue.value != null) if(dcValue.value.contains("Entered publication blackout")) { isInBlackout = true; } else { isInBlackout = false; } } // now find something that would negate blackout return isInBlackout; } }
dspace/modules/doi/dspace-doi-api/src/main/java/org/dspace/doi/DryadDOIRegistrationHelper.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package org.dspace.doi; import java.sql.SQLException; import org.dspace.content.DCValue; import org.dspace.content.Item; import org.dspace.content.MetadataSchema; /** * Convenience methods involved in registering DOIs. * @author dan */ public class DryadDOIRegistrationHelper { public static final String REGISTER_PENDING_PUBLICATION_STEP = "registerPendingPublicationStep"; public static boolean isDataPackageInPublicationBlackout(Item dataPackage) throws SQLException { // Publication blackout is indicated by provenance metadata boolean isInBlackout = false; DCValue provenance[] = dataPackage.getMetadata(MetadataSchema.DC_SCHEMA, "description", "provenance", "en"); for(DCValue dcValue : provenance) { // only return true if the last recorded provenance indicates publication blackout if(dcValue.value != null) if(dcValue.value.contains("Entered publication blackout")) { isInBlackout = true; } else { isInBlackout = false; } } // now find something that would negate blackout return isInBlackout; } }
Remove 'en' restriction when checking provenance for blackout
dspace/modules/doi/dspace-doi-api/src/main/java/org/dspace/doi/DryadDOIRegistrationHelper.java
Remove 'en' restriction when checking provenance for blackout
<ide><path>space/modules/doi/dspace-doi-api/src/main/java/org/dspace/doi/DryadDOIRegistrationHelper.java <ide> public static boolean isDataPackageInPublicationBlackout(Item dataPackage) throws SQLException { <ide> // Publication blackout is indicated by provenance metadata <ide> boolean isInBlackout = false; <del> DCValue provenance[] = dataPackage.getMetadata(MetadataSchema.DC_SCHEMA, "description", "provenance", "en"); <add> DCValue provenance[] = dataPackage.getMetadata(MetadataSchema.DC_SCHEMA, "description", "provenance", Item.ANY); <ide> for(DCValue dcValue : provenance) { <ide> // only return true if the last recorded provenance indicates publication blackout <ide> if(dcValue.value != null)
Java
agpl-3.0
57ac0a367eb2e0be0c80c757a35b715161bfd1a4
0
caiyingyuan/tigase-utils-71,caiyingyuan/tigase-utils-71
/* * Tigase Jabber/XMPP Utils * Copyright (C) 2004-2007 "Artur Hefczyc" <[email protected]> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. Look for COPYING file in the top folder. * If not, see http://www.gnu.org/licenses/. * * $Rev$ * Last modified by $Author$ * $Date$ */ package tigase.util; import java.net.UnknownHostException; /** * <code>JIDUtils</code> class contains static methods for <em>JIDUtils</em> * manipulation. * * <p> * Created: Thu Jan 27 22:53:41 2005 * </p> * @author <a href="mailto:[email protected]">Artur Hefczyc</a> * @version $Rev$ */ public abstract class JIDUtils { /** * Method <code>getNodeID</code> cuts off <em>resource</em> <em>JIDUtils</em> part * if exists and returns only node ID. * * @param jid a <code>String</code> value of <em>JIDUtils</em> to parse. * @return a <code>String</code> value of node <em>ID</em> without resource * part. */ public static final String getNodeID(final String jid) { int idx = jid.indexOf('/'); return idx == -1 ? jid.toLowerCase() : jid.substring(0, idx).toLowerCase(); } /** * Method <code>getNodeID</code> parses given <em>JIDUtils</em> and returns * <em>resource</em> part of given <em>JIDUtils</em> or empty string if there * was no <em>resource</em> part. * * @param jid a <code>String</code> value of <em>JIDUtils</em> to parse. * @return a <code>String</code> value of node <em>Resource</em> or empty * string. 
*/ public static final String getNodeResource(final String jid) { int idx = jid.indexOf('/'); return idx == -1 ? null : jid.substring(idx+1).toLowerCase(); } /** * Method <code>getNodeHost</code> parses given <em>JIDUtils</em> and returns node * <em>domain</em> part. * * @param jid a <code>String</code> value of <em>JIDUtils</em> to parse. * @return a <code>String</code> value of node <em>domain</em> part. */ public static final String getNodeHost(final String jid) { String id = getNodeID(jid); int idx = id.lastIndexOf('@'); return idx == -1 ? id.toLowerCase() : id.substring(idx+1).toLowerCase(); } /** * Method <code>getNodeHostIP</code> parses given <em>JIDUtils</em> for node * <em>domain</em> part and then tries to resolve host IP address.. * * @param jid a <code>String</code> value of <em>JIDUtils</em> to parse. * @return a <code>String</code> value of node <em>domain</em> IP address. */ public static final String getNodeHostIP(final String jid) throws UnknownHostException { String domain = getNodeHost(jid); return DNSResolver.getHostSRV_IP(domain); } /** * Method <code>getNodeNick</code> parses given <em>JIDUtils</em> and returns * node nick name or empty string if nick name could not be found. * * @param jid a <code>String</code> value of <em>JIDUtils</em> to parse. * @return a <code>String</code> value of node nick name or empty string. */ public static final String getNodeNick(final String jid) { String id = getNodeID(jid); int idx = id.lastIndexOf('@'); return idx == -1 ? null : id.substring(0, idx).toLowerCase(); } /** * This is static method to construct user <em>ID</em> from given * <em>JIDUtils</em> parts. * This is not user session <em>ID</em> (<em>JIDUtils</em>), this is just * user <em>ID</em> - <em>JIDUtils</em> without resource part. * * @param nick a <code>String</code> value of node part of <em>JIDUtils</em>. * @param domain a <code>String</code> value of domain part of <em>JIDUtils</em>. 
*/ public static final String getNodeID(final String nick, final String domain) { return ((nick != null && nick.length() > 0) ? (nick + "@" + domain).toLowerCase() : domain.toLowerCase()); } /** * <code>getJID</code> method builds valid JIDUtils string from given nick name, * domain and resource. It is aware of the fact that some elements might be * <code>null</code> and then they are not included in JIDUtils. <code>domain</code> * musn't be <code>null</code> however. * * @param nick a <code>String</code> value of JIDUtils's nick name. <code>null</code> * allowed. * @param domain a <code>String</code> value of JIDUtils's domain name. * <code>null</code> <strong>not</strong> allowed. * @param resource a <code>String</code> value of JIDUtils's resource. * @return a <code>String</code> value */ public static final String getJID(final String nick, final String domain, final String resource) { StringBuilder sb = new StringBuilder(); if (nick != null) { sb.append(nick + "@"); } // end of if (nick != null) if (domain == null) { throw new NullPointerException("Valid JIDUtils must contain at least domain name."); } // end of if (domain == null) sb.append(domain); if (resource != null) { sb.append("/" + resource); } // end of if (resource != null) return sb.toString().toLowerCase(); } /** * <code>checkNickName</code> method checks whether given string is a valid * nick name: not null, not zero length, doesn't contain invalid characters. * * @param nickname a <code>String</code> value of nick name to validate. * @return a <code>String</code> value <code>null</code> if nick name * is correct otherwise text with description of the problem. 
*/ public static final String checkNickName(final String nickname) { if (nickname == null || nickname.trim().length() == 0) { return "Nickname empty."; } // end of if (new_comp_name == null || new_comp_name.length() == 0) if (nickname.contains(" ") || nickname.contains("\t") || nickname.contains("@") || nickname.contains("&")) { return "Nickname contains invalid characters."; } // end of if (!isValidCompName(new_comp_name)) return null; } } // JIDUtils
src/main/java/tigase/util/JIDUtils.java
/* * Tigase Jabber/XMPP Utils * Copyright (C) 2004-2007 "Artur Hefczyc" <[email protected]> * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. Look for COPYING file in the top folder. * If not, see http://www.gnu.org/licenses/. * * $Rev$ * Last modified by $Author$ * $Date$ */ package tigase.util; import java.net.InetAddress; import java.net.UnknownHostException; /** * <code>JIDUtils</code> class contains static methods for <em>JIDUtils</em> * manipulation. * * <p> * Created: Thu Jan 27 22:53:41 2005 * </p> * @author <a href="mailto:[email protected]">Artur Hefczyc</a> * @version $Rev$ */ public abstract class JIDUtils { /** * Method <code>getNodeID</code> cuts off <em>resource</em> <em>JIDUtils</em> part * if exists and returns only node ID. * * @param jid a <code>String</code> value of <em>JIDUtils</em> to parse. * @return a <code>String</code> value of node <em>ID</em> without resource * part. */ public static final String getNodeID(final String jid) { int idx = jid.indexOf('/'); return idx == -1 ? jid.toLowerCase() : jid.substring(0, idx).toLowerCase(); } /** * Method <code>getNodeID</code> parses given <em>JIDUtils</em> and returns * <em>resource</em> part of given <em>JIDUtils</em> or empty string if there * was no <em>resource</em> part. * * @param jid a <code>String</code> value of <em>JIDUtils</em> to parse. * @return a <code>String</code> value of node <em>Resource</em> or empty * string. 
*/ public static final String getNodeResource(final String jid) { int idx = jid.indexOf('/'); return idx == -1 ? null : jid.substring(idx+1).toLowerCase(); } /** * Method <code>getNodeHost</code> parses given <em>JIDUtils</em> and returns node * <em>domain</em> part. * * @param jid a <code>String</code> value of <em>JIDUtils</em> to parse. * @return a <code>String</code> value of node <em>domain</em> part. */ public static final String getNodeHost(final String jid) { String id = getNodeID(jid); int idx = id.lastIndexOf('@'); return idx == -1 ? id.toLowerCase() : id.substring(idx+1).toLowerCase(); } /** * Method <code>getNodeHostIP</code> parses given <em>JIDUtils</em> for node * <em>domain</em> part and then tries to resolve host IP address.. * * @param jid a <code>String</code> value of <em>JIDUtils</em> to parse. * @return a <code>String</code> value of node <em>domain</em> IP address. */ public static final String getNodeHostIP(final String jid) throws UnknownHostException { String domain = getNodeHost(jid); return DNSResolver.getHostSRV_IP(domain); } /** * Method <code>getNodeNick</code> parses given <em>JIDUtils</em> and returns * node nick name or empty string if nick name could not be found. * * @param jid a <code>String</code> value of <em>JIDUtils</em> to parse. * @return a <code>String</code> value of node nick name or empty string. */ public static final String getNodeNick(final String jid) { String id = getNodeID(jid); int idx = id.lastIndexOf('@'); return idx == -1 ? null : id.substring(0, idx).toLowerCase(); } /** * This is static method to construct user <em>ID</em> from given * <em>JIDUtils</em> parts. * This is not user session <em>ID</em> (<em>JIDUtils</em>), this is just * user <em>ID</em> - <em>JIDUtils</em> without resource part. * * @param nick a <code>String</code> value of node part of <em>JIDUtils</em>. * @param domain a <code>String</code> value of domain part of <em>JIDUtils</em>. 
*/ public static final String getNodeID(final String nick, final String domain) { return ((nick != null && nick.length() > 0) ? (nick + "@" + domain).toLowerCase() : domain.toLowerCase()); } /** * <code>getJID</code> method builds valid JIDUtils string from given nick name, * domain and resource. It is aware of the fact that some elements might be * <code>null</code> and then they are not included in JIDUtils. <code>domain</code> * musn't be <code>null</code> however. * * @param nick a <code>String</code> value of JIDUtils's nick name. <code>null</code> * allowed. * @param domain a <code>String</code> value of JIDUtils's domain name. * <code>null</code> <strong>not</strong> allowed. * @param resource a <code>String</code> value of JIDUtils's resource. * @return a <code>String</code> value */ public static final String getJID(final String nick, final String domain, final String resource) { StringBuilder sb = new StringBuilder(); if (nick != null) { sb.append(nick + "@"); } // end of if (nick != null) if (domain == null) { throw new NullPointerException("Valid JIDUtils must contain at least domain name."); } // end of if (domain == null) sb.append(domain); if (resource != null) { sb.append("/" + resource); } // end of if (resource != null) return sb.toString().toLowerCase(); } /** * <code>checkNickName</code> method checks whether given string is a valid * nick name: not null, not zero length, doesn't contain invalid characters. * * @param nickname a <code>String</code> value of nick name to validate. * @return a <code>String</code> value <code>null</code> if nick name * is correct otherwise text with description of the problem. 
*/ public static final String checkNickName(final String nickname) { if (nickname == null || nickname.trim().length() == 0) { return "Nickname empty."; } // end of if (new_comp_name == null || new_comp_name.length() == 0) if (nickname.contains(" ") || nickname.contains("\t") || nickname.contains("@") || nickname.contains("&")) { return "Nickname contains invalid characters."; } // end of if (!isValidCompName(new_comp_name)) return null; } } // JIDUtils
Unused import removed git-svn-id: 78a0b1024db9beb524bc745052f6db0c395bb78f@513 20a39203-4b1a-0410-9ea8-f7f58976c10f
src/main/java/tigase/util/JIDUtils.java
Unused import removed
<ide><path>rc/main/java/tigase/util/JIDUtils.java <ide> */ <ide> package tigase.util; <ide> <del>import java.net.InetAddress; <ide> import java.net.UnknownHostException; <ide> <ide> /**
Java
apache-2.0
06899077070ee7957632b4468b9a7c881c9cd48b
0
michael-rapp/ChromeLikeTabSwitcher
/* * Copyright 2016 - 2017 Michael Rapp * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package de.mrapp.android.tabswitcher.gesture; import android.graphics.RectF; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.view.MotionEvent; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import static de.mrapp.android.util.Condition.ensureNotNull; /** * A dispatcher, which allows to dispatch touch events to multiple event handlers in the order of * their priority. Only the first event handler, which is suited to handle an event, is invoked. * * @author Michael Rapp * @since 1.0.0 */ public class TouchEventDispatcher implements Iterable<AbstractTouchEventHandler> { /** * Defines the interface, a class, which should be notified, when event handlers are added to or * removed from a {@link TouchEventDispatcher}, must implement. */ public interface Callback { /** * The method, which is invoked, when an event handler has been added. * * @param dispatcher * The dispatcher, the event handler has been added to, as an instance of the class * {@link TouchEventDispatcher}. The dispatcher may not be null * @param eventHandler * The event handler, which has been added, as an instance of the class {@link * AbstractTouchEventHandler}. 
The event handler may not be null */ void onAddedEventHandler(@NonNull TouchEventDispatcher dispatcher, @NonNull AbstractTouchEventHandler eventHandler); /** * The method, which is invoked, when an event handler has been removed. * * @param dispatcher * The dispatcher, the event handler has been removed from, as an instance of the * class {@link TouchEventDispatcher}. The dispatcher may not be null * @param eventHandler * The event handler, which has been removed, as an instance of the class {@link * AbstractTouchEventHandler}. The event handler may not be null */ void onRemovedEventHandler(@NonNull TouchEventDispatcher dispatcher, @NonNull AbstractTouchEventHandler eventHandler); } /** * An iterator, which allows to iterate the event handlers of a {@link TouchEventDispatcher}. */ private class EventHandlerIterator implements Iterator<AbstractTouchEventHandler> { /** * The iterator, which allows to iterate the priorities of the event handlers. */ private Iterator<Integer> priorityIterator; /** * The iterator, which allows to iterate the event handlers with the current priority. */ private Iterator<AbstractTouchEventHandler> eventHandlerIterator; /** * Creates a new iterator, which allows to iterate the event handlers of a {@link * TouchEventDispatcher}. 
*/ public EventHandlerIterator() { priorityIterator = eventHandlers.keySet().iterator(); if (priorityIterator.hasNext()) { int key = priorityIterator.next(); Set<AbstractTouchEventHandler> handlers = eventHandlers.get(key); eventHandlerIterator = handlers.iterator(); } else { eventHandlerIterator = null; } } @Override public boolean hasNext() { return (eventHandlerIterator != null && eventHandlerIterator.hasNext()) || priorityIterator.hasNext(); } @Override public AbstractTouchEventHandler next() { if (eventHandlerIterator.hasNext()) { return eventHandlerIterator.next(); } else if (priorityIterator.hasNext()) { int key = priorityIterator.next(); Set<AbstractTouchEventHandler> handlers = eventHandlers.get(key); eventHandlerIterator = handlers.iterator(); return next(); } return null; } } /** * A sorted map, which contains the event handlers, touch events can be dispatched to. The * handlers are sorted by decreasing priority. */ private final SortedMap<Integer, Set<AbstractTouchEventHandler>> eventHandlers; /** * The event handler, which is currently active. */ private AbstractTouchEventHandler activeEventHandler; /** * The callback, which is notified, when event handlers are added or removed. */ private Callback callback; /** * Returns, whether a specific touch event occurred inside the touchable area of an event * handler. * * @param event * The touch event, which should be checked, as an instance of the class {@link * MotionEvent}. The touch event may not be null * @param eventHandler * The event handler as an instance of the class {@link AbstractTouchEventHandler}. 
The * event handler may not be null * @return True, if the given touch event occurred inside the touchable area, false otherwise */ private boolean isInsideTouchableArea(@NonNull final MotionEvent event, @NonNull final AbstractTouchEventHandler eventHandler) { RectF touchableArea = eventHandler.getTouchableArea(); return touchableArea == null || (event.getX() >= touchableArea.left && event.getX() <= touchableArea.right && event.getY() >= touchableArea.top && event.getY() <= touchableArea.bottom); } /** * Notifies the callback, that an event handler has been added to the dispatcher. * * @param eventHandler * The event handler, which has been added, as an instance of the class {@link * AbstractTouchEventHandler}. The event handler may not be null */ private void notifyOnAddedEventHandler(@NonNull final AbstractTouchEventHandler eventHandler) { if (callback != null) { callback.onAddedEventHandler(this, eventHandler); } } /** * Notifies the callback, that an event handler has been removed from the dispatcher. * * @param eventHandler * The event handler, which has been removed, as an instance of the class {@link * AbstractTouchEventHandler}. The event handler may not be null */ private void notifyOnRemovedEventHandler( @NonNull final AbstractTouchEventHandler eventHandler) { if (callback != null) { callback.onRemovedEventHandler(this, eventHandler); } } /** * Creates a new dispatcher, which allows to dispatch touch events to multiple event handlers in * the order of their priority. */ public TouchEventDispatcher() { this.eventHandlers = new TreeMap<>(Collections.reverseOrder()); this.activeEventHandler = null; this.callback = null; } /** * Sets the callback, which should be notified, when event handlers are added or removed. 
* * @param callback * The callback, which should be set, as an instance of the type {@link Callback} or * null, if no callback should be notified */ public final void setCallback(@Nullable final Callback callback) { this.callback = callback; } /** * Adds a specific event handler to the dispatcher. * * @param handler * The event handler, which should be added, as an instance of hte class {@link * AbstractTouchEventHandler}. The event handler may not be null */ public final void addEventHandler(@NonNull final AbstractTouchEventHandler handler) { ensureNotNull(handler, "The handler may not be null"); int key = handler.getPriority(); Set<AbstractTouchEventHandler> handlers = eventHandlers.get(key); if (handlers == null) { handlers = new LinkedHashSet<>(); eventHandlers.put(key, handlers); } handlers.add(handler); notifyOnAddedEventHandler(handler); } /** * Removes a specific event handler from the dispatcher. * * @param handler * The event handler, which should be removed, as an instance of the class {@link * AbstractTouchEventHandler}. The event handler may not be null */ public final void removeEventHandler(@NonNull final AbstractTouchEventHandler handler) { ensureNotNull(handler, "The handler may not be null"); Collection<AbstractTouchEventHandler> handlers = eventHandlers.get(handler.getPriority()); if (handlers != null) { Iterator<AbstractTouchEventHandler> iterator = handlers.iterator(); AbstractTouchEventHandler eventHandler; while ((eventHandler = iterator.next()) != null) { if (handler.equals(eventHandler)) { iterator.remove(); notifyOnRemovedEventHandler(eventHandler); } } } if (handler.equals(activeEventHandler)) { activeEventHandler.onUp(null); activeEventHandler = null; } } /** * Handles a specific touch event by dispatching it to the first suited handler. * * @param event * The event, which should be dispatched, as an instance of the class {@link * MotionEvent}. 
The event may not be null * @return True, if the event has been handled, false otherwise */ public final boolean dispatchTouchEvent(@NonNull final MotionEvent event) { ensureNotNull(event, "The event may not be null"); boolean handled = false; if (activeEventHandler != null) { if (isInsideTouchableArea(event, activeEventHandler)) { handled = activeEventHandler.handleTouchEvent(event); } else { activeEventHandler.onUp(event); activeEventHandler = null; } } if (!handled) { Iterator<AbstractTouchEventHandler> iterator = iterator(); AbstractTouchEventHandler handler; while ((handler = iterator.next()) != null && !handled) { if (isInsideTouchableArea(event, handler)) { handled = handler.handleTouchEvent(event); } } } return handled; } @Override public final Iterator<AbstractTouchEventHandler> iterator() { return new EventHandlerIterator(); } }
library/src/main/java/de/mrapp/android/tabswitcher/gesture/TouchEventDispatcher.java
/* * Copyright 2016 - 2017 Michael Rapp * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package de.mrapp.android.tabswitcher.gesture; import android.graphics.RectF; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.view.MotionEvent; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import static de.mrapp.android.util.Condition.ensureNotNull; /** * A dispatcher, which allows to dispatch touch events to multiple event handlers in the order of * their priority. Only the first event handler, which is suited to handle an event, is invoked. * * @author Michael Rapp * @since 1.0.0 */ public class TouchEventDispatcher { /** * Defines the interface, a class, which should be notified, when event handlers are added to or * removed from a {@link TouchEventDispatcher}, must implement. */ public interface Callback { /** * The method, which is invoked, when an event handler has been added. * * @param dispatcher * The dispatcher, the event handler has been added to, as an instance of the class * {@link TouchEventDispatcher}. The dispatcher may not be null * @param eventHandler * The event handler, which has been added, as an instance of the class {@link * AbstractTouchEventHandler}. 
The event handler may not be null */ void onAddedEventHandler(@NonNull TouchEventDispatcher dispatcher, @NonNull AbstractTouchEventHandler eventHandler); /** * The method, which is invoked, when an event handler has been removed. * * @param dispatcher * The dispatcher, the event handler has been removed from, as an instance of the * class {@link TouchEventDispatcher}. The dispatcher may not be null * @param eventHandler * The event handler, which has been removed, as an instance of the class {@link * AbstractTouchEventHandler}. The event handler may not be null */ void onRemovedEventHandler(@NonNull TouchEventDispatcher dispatcher, @NonNull AbstractTouchEventHandler eventHandler); } /** * A sorted map, which contains the event handlers, touch events can be dispatched to. The * handlers are sorted by decreasing priority. */ private final SortedMap<Integer, Set<AbstractTouchEventHandler>> eventHandlers; /** * The event handler, which is currently active. */ private AbstractTouchEventHandler activeEventHandler; /** * The callback, which is notified, when event handlers are added or removed. */ private Callback callback; /** * Returns, whether a specific touch event occurred inside the touchable area of an event * handler. * * @param event * The touch event, which should be checked, as an instance of the class {@link * MotionEvent}. The touch event may not be null * @param eventHandler * The event handler as an instance of the class {@link AbstractTouchEventHandler}. 
The * event handler may not be null * @return True, if the given touch event occurred inside the touchable area, false otherwise */ private boolean isInsideTouchableArea(@NonNull final MotionEvent event, @NonNull final AbstractTouchEventHandler eventHandler) { RectF touchableArea = eventHandler.getTouchableArea(); return touchableArea == null || (event.getX() >= touchableArea.left && event.getX() <= touchableArea.right && event.getY() >= touchableArea.top && event.getY() <= touchableArea.bottom); } /** * Notifies the callback, that an event handler has been added to the dispatcher. * * @param eventHandler * The event handler, which has been added, as an instance of the class {@link * AbstractTouchEventHandler}. The event handler may not be null */ private void notifyOnAddedEventHandler(@NonNull final AbstractTouchEventHandler eventHandler) { if (callback != null) { callback.onAddedEventHandler(this, eventHandler); } } /** * Notifies the callback, that an event handler has been removed from the dispatcher. * * @param eventHandler * The event handler, which has been removed, as an instance of the class {@link * AbstractTouchEventHandler}. The event handler may not be null */ private void notifyOnRemovedEventHandler( @NonNull final AbstractTouchEventHandler eventHandler) { if (callback != null) { callback.onRemovedEventHandler(this, eventHandler); } } /** * Creates a new dispatcher, which allows to dispatch touch events to multiple event handlers in * the order of their priority. */ public TouchEventDispatcher() { this.eventHandlers = new TreeMap<>(Collections.reverseOrder()); this.activeEventHandler = null; this.callback = null; } /** * Sets the callback, which should be notified, when event handlers are added or removed. 
* * @param callback * The callback, which should be set, as an instance of the type {@link Callback} or * null, if no callback should be notified */ public final void setCallback(@Nullable final Callback callback) { this.callback = callback; } /** * Adds a specific event handler to the dispatcher. * * @param handler * The event handler, which should be added, as an instance of hte class {@link * AbstractTouchEventHandler}. The event handler may not be null */ public final void addEventHandler(@NonNull final AbstractTouchEventHandler handler) { ensureNotNull(handler, "The handler may not be null"); int key = handler.getPriority(); Set<AbstractTouchEventHandler> handlers = eventHandlers.get(key); if (handlers == null) { handlers = new LinkedHashSet<>(); eventHandlers.put(key, handlers); } handlers.add(handler); notifyOnAddedEventHandler(handler); } /** * Removes a specific event handler from the dispatcher. * * @param handler * The event handler, which should be removed, as an instance of the class {@link * AbstractTouchEventHandler}. The event handler may not be null */ public final void removeEventHandler(@NonNull final AbstractTouchEventHandler handler) { ensureNotNull(handler, "The handler may not be null"); Collection<AbstractTouchEventHandler> handlers = eventHandlers.get(handler.getPriority()); if (handlers != null) { Iterator<AbstractTouchEventHandler> iterator = handlers.iterator(); AbstractTouchEventHandler eventHandler; while ((eventHandler = iterator.next()) != null) { if (handler.equals(eventHandler)) { iterator.remove(); notifyOnRemovedEventHandler(eventHandler); } } } if (handler.equals(activeEventHandler)) { activeEventHandler.onUp(null); activeEventHandler = null; } } /** * Handles a specific touch event by dispatching it to the first suited handler. * * @param event * The event, which should be dispatched, as an instance of the class {@link * MotionEvent}. 
The event may not be null * @return True, if the event has been handled, false otherwise */ public final boolean dispatchTouchEvent(@NonNull final MotionEvent event) { ensureNotNull(event, "The event may not be null"); boolean handled = false; if (activeEventHandler != null) { if (isInsideTouchableArea(event, activeEventHandler)) { handled = activeEventHandler.handleTouchEvent(event); } else { activeEventHandler.onUp(event); activeEventHandler = null; } } if (!handled) { Iterator<Map.Entry<Integer, Set<AbstractTouchEventHandler>>> entryIterator = eventHandlers.entrySet().iterator(); Map.Entry<Integer, Set<AbstractTouchEventHandler>> entry; while ((entry = entryIterator.next()) != null && !handled) { Iterator<AbstractTouchEventHandler> handlerIterator = entry.getValue().iterator(); AbstractTouchEventHandler handler; while ((handler = handlerIterator.next()) != null && !handled) { if (isInsideTouchableArea(event, handler)) { handled = handler.handleTouchEvent(event); } } } } return handled; } }
The class TouchEventDispatcher does now implement the interface Iterable.
library/src/main/java/de/mrapp/android/tabswitcher/gesture/TouchEventDispatcher.java
The class TouchEventDispatcher does now implement the interface Iterable.
<ide><path>ibrary/src/main/java/de/mrapp/android/tabswitcher/gesture/TouchEventDispatcher.java <ide> import java.util.Collections; <ide> import java.util.Iterator; <ide> import java.util.LinkedHashSet; <del>import java.util.Map; <ide> import java.util.Set; <ide> import java.util.SortedMap; <ide> import java.util.TreeMap; <ide> * @author Michael Rapp <ide> * @since 1.0.0 <ide> */ <del>public class TouchEventDispatcher { <add>public class TouchEventDispatcher implements Iterable<AbstractTouchEventHandler> { <ide> <ide> /** <ide> * Defines the interface, a class, which should be notified, when event handlers are added to or <ide> } <ide> <ide> /** <add> * An iterator, which allows to iterate the event handlers of a {@link TouchEventDispatcher}. <add> */ <add> private class EventHandlerIterator implements Iterator<AbstractTouchEventHandler> { <add> <add> /** <add> * The iterator, which allows to iterate the priorities of the event handlers. <add> */ <add> private Iterator<Integer> priorityIterator; <add> <add> /** <add> * The iterator, which allows to iterate the event handlers with the current priority. <add> */ <add> private Iterator<AbstractTouchEventHandler> eventHandlerIterator; <add> <add> /** <add> * Creates a new iterator, which allows to iterate the event handlers of a {@link <add> * TouchEventDispatcher}. 
<add> */ <add> public EventHandlerIterator() { <add> priorityIterator = eventHandlers.keySet().iterator(); <add> <add> if (priorityIterator.hasNext()) { <add> int key = priorityIterator.next(); <add> Set<AbstractTouchEventHandler> handlers = eventHandlers.get(key); <add> eventHandlerIterator = handlers.iterator(); <add> } else { <add> eventHandlerIterator = null; <add> } <add> } <add> <add> @Override <add> public boolean hasNext() { <add> return (eventHandlerIterator != null && eventHandlerIterator.hasNext()) || <add> priorityIterator.hasNext(); <add> } <add> <add> @Override <add> public AbstractTouchEventHandler next() { <add> if (eventHandlerIterator.hasNext()) { <add> return eventHandlerIterator.next(); <add> } else if (priorityIterator.hasNext()) { <add> int key = priorityIterator.next(); <add> Set<AbstractTouchEventHandler> handlers = eventHandlers.get(key); <add> eventHandlerIterator = handlers.iterator(); <add> return next(); <add> } <add> <add> return null; <add> } <add> <add> } <add> <add> /** <ide> * A sorted map, which contains the event handlers, touch events can be dispatched to. The <ide> * handlers are sorted by decreasing priority. 
<ide> */ <ide> } <ide> <ide> if (!handled) { <del> Iterator<Map.Entry<Integer, Set<AbstractTouchEventHandler>>> entryIterator = <del> eventHandlers.entrySet().iterator(); <del> Map.Entry<Integer, Set<AbstractTouchEventHandler>> entry; <del> <del> while ((entry = entryIterator.next()) != null && !handled) { <del> Iterator<AbstractTouchEventHandler> handlerIterator = entry.getValue().iterator(); <del> AbstractTouchEventHandler handler; <del> <del> while ((handler = handlerIterator.next()) != null && !handled) { <del> if (isInsideTouchableArea(event, handler)) { <del> handled = handler.handleTouchEvent(event); <del> } <add> Iterator<AbstractTouchEventHandler> iterator = iterator(); <add> AbstractTouchEventHandler handler; <add> <add> while ((handler = iterator.next()) != null && !handled) { <add> if (isInsideTouchableArea(event, handler)) { <add> handled = handler.handleTouchEvent(event); <ide> } <ide> } <ide> } <ide> return handled; <ide> } <ide> <add> @Override <add> public final Iterator<AbstractTouchEventHandler> iterator() { <add> return new EventHandlerIterator(); <add> } <add> <ide> }
Java
bsd-3-clause
38e37d9e7609b5e0180b1323316ff98fe60c6a72
0
webhost/jing-trang,webhost/jing-trang,webhost/jing-trang
package com.thaiopensource.relaxng.output.rng; import com.thaiopensource.relaxng.edit.AbstractVisitor; import com.thaiopensource.relaxng.edit.DefineComponent; import com.thaiopensource.relaxng.edit.DivComponent; import com.thaiopensource.relaxng.edit.IncludeComponent; import com.thaiopensource.relaxng.edit.GrammarPattern; import com.thaiopensource.relaxng.edit.Container; import com.thaiopensource.relaxng.edit.Component; import com.thaiopensource.relaxng.edit.UnaryPattern; import com.thaiopensource.relaxng.edit.CompositePattern; import com.thaiopensource.relaxng.edit.Pattern; import com.thaiopensource.relaxng.edit.NameClassedPattern; import com.thaiopensource.relaxng.edit.ChoiceNameClass; import com.thaiopensource.relaxng.edit.NameClass; import com.thaiopensource.relaxng.edit.ValuePattern; import com.thaiopensource.relaxng.edit.DataPattern; import com.thaiopensource.relaxng.edit.NameNameClass; import com.thaiopensource.relaxng.edit.AnyNameNameClass; import com.thaiopensource.relaxng.edit.NsNameNameClass; import com.thaiopensource.relaxng.edit.Annotated; import com.thaiopensource.relaxng.edit.AttributeAnnotation; import com.thaiopensource.relaxng.edit.AnnotationChild; import com.thaiopensource.relaxng.edit.ElementAnnotation; import com.thaiopensource.relaxng.edit.Param; import com.thaiopensource.relaxng.edit.AttributePattern; import com.thaiopensource.relaxng.parse.Context; import com.thaiopensource.xml.util.WellKnownNamespaces; import java.util.List; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import java.util.Enumeration; class Analyzer extends AbstractVisitor { private Object visitAnnotated(Annotated anno) { if (anno.getAttributeAnnotations().size() > 0 || anno.getChildElementAnnotations().size() > 0 || anno.getFollowingElementAnnotations().size() > 0) noteContext(anno.getContext()); visitAnnotationAttributes(anno.getAttributeAnnotations()); visitAnnotationChildren(anno.getChildElementAnnotations()); 
visitAnnotationChildren(anno.getFollowingElementAnnotations()); return null; } private void visitAnnotationAttributes(List list) { for (int i = 0, len = list.size(); i < len; i++) { AttributeAnnotation att = (AttributeAnnotation)list.get(i); if (att.getNamespaceUri().length() != 0) noteNs(att.getPrefix(), att.getNamespaceUri()); } } private void visitAnnotationChildren(List list) { for (int i = 0, len = list.size(); i < len; i++) { AnnotationChild ac = (AnnotationChild)list.get(i); if (ac instanceof ElementAnnotation) { ElementAnnotation elem = (ElementAnnotation)ac; if (elem.getPrefix() != null) noteNs(elem.getPrefix(), elem.getNamespaceUri()); visitAnnotationAttributes(elem.getAttributes()); visitAnnotationChildren(elem.getChildren()); } } } public Object visitPattern(Pattern p) { return visitAnnotated(p); } public Object visitDefine(DefineComponent c) { visitAnnotated(c); return c.getBody().accept(this); } public Object visitDiv(DivComponent c) { visitAnnotated(c); return visitContainer(c); } public Object visitInclude(IncludeComponent c) { visitAnnotated(c); noteInheritNs(c.getNs()); return visitContainer(c); } public Object visitGrammar(GrammarPattern p) { visitAnnotated(p); return visitContainer(p); } private Object visitContainer(Container c) { List list = c.getComponents(); for (int i = 0, len = list.size(); i < len; i++) ((Component)list.get(i)).accept(this); return null; } public Object visitUnary(UnaryPattern p) { visitAnnotated(p); return p.getChild().accept(this); } public Object visitComposite(CompositePattern p) { visitAnnotated(p); List list = p.getChildren(); for (int i = 0, len = list.size(); i < len; i++) ((Pattern)list.get(i)).accept(this); return null; } public Object visitNameClassed(NameClassedPattern p) { p.getNameClass().accept(this); return visitUnary(p); } public Object visitAttribute(AttributePattern p) { NameClass nc = p.getNameClass(); if (nc instanceof NameNameClass && ((NameNameClass)nc).getNamespaceUri().equals("")) return 
visitUnary(p); return visitNameClassed(p); } public Object visitChoice(ChoiceNameClass nc) { visitAnnotated(nc); List list = nc.getChildren(); for (int i = 0, len = list.size(); i < len; i++) ((NameClass)list.get(i)).accept(this); return null; } public Object visitValue(ValuePattern p) { visitAnnotated(p); if (!p.getType().equals("token") || !p.getDatatypeLibrary().equals("")) noteDatatypeLibrary(p.getDatatypeLibrary()); for (Iterator iter = p.getPrefixMap().entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry)iter.next(); noteNs((String)entry.getKey(), (String)entry.getValue()); } return null; } public Object visitData(DataPattern p) { visitAnnotated(p); noteDatatypeLibrary(p.getDatatypeLibrary()); Pattern except = p.getExcept(); if (except != null) except.accept(this); for (Iterator iter = p.getParams().iterator(); iter.hasNext();) visitAnnotated((Param)iter.next()); return null; } public Object visitName(NameNameClass nc) { visitAnnotated(nc); noteNs(nc.getPrefix(), nc.getNamespaceUri()); return null; } public Object visitAnyName(AnyNameNameClass nc) { visitAnnotated(nc); NameClass except = nc.getExcept(); if (except != null) except.accept(this); return null; } public Object visitNsName(NsNameNameClass nc) { visitAnnotated(nc); noteInheritNs(nc.getNs()); NameClass except = nc.getExcept(); if (except != null) except.accept(this); return null; } private String datatypeLibrary = null; private final Map prefixMap = new HashMap(); private boolean haveInherit = false; private Context lastContext = null; private String noPrefixNs = null; private void noteDatatypeLibrary(String uri) { if (datatypeLibrary == null || datatypeLibrary.length() == 0) datatypeLibrary = uri; } private void noteInheritNs(String ns) { if (ns == NameClass.INHERIT_NS) haveInherit = true; else noPrefixNs = ns; } private void noteNs(String prefix, String ns) { if (ns == NameClass.INHERIT_NS) { haveInherit = true; return; } if (prefix == null) prefix = ""; if (ns == null || 
(ns.length() == 0 && prefix.length() != 0) || prefixMap.containsKey(prefix)) return; prefixMap.put(prefix, ns); } private void noteContext(Context context) { if (context == null || context == lastContext) return; lastContext = context; for (Enumeration enum = context.prefixes(); enum.hasMoreElements();) { String prefix = (String)enum.nextElement(); noteNs(prefix, context.resolveNamespacePrefix(prefix)); } } Map getPrefixMap() { if (haveInherit) prefixMap.remove(""); else if (noPrefixNs != null && !prefixMap.containsKey("")) prefixMap.put("", noPrefixNs); prefixMap.put("xml", WellKnownNamespaces.XML); return prefixMap; } String getDatatypeLibrary() { return datatypeLibrary; } }
trang/src/com/thaiopensource/relaxng/output/rng/Analyzer.java
package com.thaiopensource.relaxng.output.rng; import com.thaiopensource.relaxng.edit.AbstractVisitor; import com.thaiopensource.relaxng.edit.DefineComponent; import com.thaiopensource.relaxng.edit.DivComponent; import com.thaiopensource.relaxng.edit.IncludeComponent; import com.thaiopensource.relaxng.edit.GrammarPattern; import com.thaiopensource.relaxng.edit.Container; import com.thaiopensource.relaxng.edit.Component; import com.thaiopensource.relaxng.edit.UnaryPattern; import com.thaiopensource.relaxng.edit.CompositePattern; import com.thaiopensource.relaxng.edit.Pattern; import com.thaiopensource.relaxng.edit.NameClassedPattern; import com.thaiopensource.relaxng.edit.ChoiceNameClass; import com.thaiopensource.relaxng.edit.NameClass; import com.thaiopensource.relaxng.edit.ValuePattern; import com.thaiopensource.relaxng.edit.DataPattern; import com.thaiopensource.relaxng.edit.NameNameClass; import com.thaiopensource.relaxng.edit.AnyNameNameClass; import com.thaiopensource.relaxng.edit.NsNameNameClass; import com.thaiopensource.relaxng.edit.Annotated; import com.thaiopensource.relaxng.edit.AttributeAnnotation; import com.thaiopensource.relaxng.edit.AnnotationChild; import com.thaiopensource.relaxng.edit.ElementAnnotation; import com.thaiopensource.relaxng.edit.Param; import com.thaiopensource.relaxng.edit.AttributePattern; import com.thaiopensource.relaxng.parse.Context; import com.thaiopensource.xml.util.WellKnownNamespaces; import java.util.List; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import java.util.Enumeration; class Analyzer extends AbstractVisitor { private Object visitAnnotated(Annotated anno) { if (anno.getAttributeAnnotations().size() > 0 || anno.getChildElementAnnotations().size() > 0 || anno.getFollowingElementAnnotations().size() > 0) noteContext(anno.getContext()); visitAnnotationAttributes(anno.getAttributeAnnotations()); visitAnnotationChildren(anno.getChildElementAnnotations()); 
visitAnnotationChildren(anno.getFollowingElementAnnotations()); return null; } private void visitAnnotationAttributes(List list) { for (int i = 0, len = list.size(); i < len; i++) { AttributeAnnotation att = (AttributeAnnotation)list.get(i); if (att.getNamespaceUri().length() != 0) noteNs(att.getPrefix(), att.getNamespaceUri()); } } private void visitAnnotationChildren(List list) { for (int i = 0, len = list.size(); i < len; i++) { AnnotationChild ac = (AnnotationChild)list.get(i); if (ac instanceof ElementAnnotation) { ElementAnnotation elem = (ElementAnnotation)ac; if (elem.getPrefix() != null) noteNs(elem.getPrefix(), elem.getNamespaceUri()); visitAnnotationAttributes(elem.getAttributes()); visitAnnotationChildren(elem.getChildren()); } } } public Object visitPattern(Pattern p) { return visitAnnotated(p); } public Object visitDefine(DefineComponent c) { visitAnnotated(c); return c.getBody().accept(this); } public Object visitDiv(DivComponent c) { visitAnnotated(c); return visitContainer(c); } public Object visitInclude(IncludeComponent c) { visitAnnotated(c); return visitContainer(c); } public Object visitGrammar(GrammarPattern p) { visitAnnotated(p); return visitContainer(p); } private Object visitContainer(Container c) { List list = c.getComponents(); for (int i = 0, len = list.size(); i < len; i++) ((Component)list.get(i)).accept(this); return null; } public Object visitUnary(UnaryPattern p) { visitAnnotated(p); return p.getChild().accept(this); } public Object visitComposite(CompositePattern p) { visitAnnotated(p); List list = p.getChildren(); for (int i = 0, len = list.size(); i < len; i++) ((Pattern)list.get(i)).accept(this); return null; } public Object visitNameClassed(NameClassedPattern p) { p.getNameClass().accept(this); return visitUnary(p); } public Object visitAttribute(AttributePattern p) { NameClass nc = p.getNameClass(); if (nc instanceof NameNameClass && ((NameNameClass)nc).getNamespaceUri().equals("")) return visitUnary(p); return 
visitNameClassed(p); } public Object visitChoice(ChoiceNameClass nc) { visitAnnotated(nc); List list = nc.getChildren(); for (int i = 0, len = list.size(); i < len; i++) ((NameClass)list.get(i)).accept(this); return null; } public Object visitValue(ValuePattern p) { visitAnnotated(p); if (!p.getType().equals("token") || !p.getDatatypeLibrary().equals("")) noteDatatypeLibrary(p.getDatatypeLibrary()); for (Iterator iter = p.getPrefixMap().entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry)iter.next(); noteNs((String)entry.getKey(), (String)entry.getValue()); } return null; } public Object visitData(DataPattern p) { visitAnnotated(p); noteDatatypeLibrary(p.getDatatypeLibrary()); Pattern except = p.getExcept(); if (except != null) except.accept(this); for (Iterator iter = p.getParams().iterator(); iter.hasNext();) visitAnnotated((Param)iter.next()); return null; } public Object visitName(NameNameClass nc) { visitAnnotated(nc); noteNs(nc.getPrefix(), nc.getNamespaceUri()); return null; } public Object visitAnyName(AnyNameNameClass nc) { visitAnnotated(nc); NameClass except = nc.getExcept(); if (except != null) except.accept(this); return null; } public Object visitNsName(NsNameNameClass nc) { visitAnnotated(nc); noteInheritNs(nc.getNs()); NameClass except = nc.getExcept(); if (except != null) except.accept(this); return null; } private String datatypeLibrary = null; private final Map prefixMap = new HashMap(); private boolean haveInherit = false; private Context lastContext = null; private void noteDatatypeLibrary(String uri) { if (datatypeLibrary == null || datatypeLibrary.length() == 0) datatypeLibrary = uri; } private void noteInheritNs(String ns) { if (ns == NameClass.INHERIT_NS) haveInherit = true; } private void noteNs(String prefix, String ns) { if (ns == NameClass.INHERIT_NS) { haveInherit = true; return; } if (prefix == null) prefix = ""; if (ns == null || (ns.length() == 0 && prefix.length() != 0) || prefixMap.containsKey(prefix)) return; 
prefixMap.put(prefix, ns); } private void noteContext(Context context) { if (context == null || context == lastContext) return; lastContext = context; for (Enumeration enum = context.prefixes(); enum.hasMoreElements();) { String prefix = (String)enum.nextElement(); noteNs(prefix, context.resolveNamespacePrefix(prefix)); } } Map getPrefixMap() { if (haveInherit) prefixMap.remove(""); prefixMap.put("xml", WellKnownNamespaces.XML); return prefixMap; } String getDatatypeLibrary() { return datatypeLibrary; } }
Note inherited namespace on include if any. Use a default namespace for nsName and include if not already used.
trang/src/com/thaiopensource/relaxng/output/rng/Analyzer.java
Note inherited namespace on include if any. Use a default namespace for nsName and include if not already used.
<ide><path>rang/src/com/thaiopensource/relaxng/output/rng/Analyzer.java <ide> <ide> public Object visitInclude(IncludeComponent c) { <ide> visitAnnotated(c); <add> noteInheritNs(c.getNs()); <ide> return visitContainer(c); <ide> } <ide> <ide> private final Map prefixMap = new HashMap(); <ide> private boolean haveInherit = false; <ide> private Context lastContext = null; <add> private String noPrefixNs = null; <ide> <ide> private void noteDatatypeLibrary(String uri) { <ide> if (datatypeLibrary == null || datatypeLibrary.length() == 0) <ide> private void noteInheritNs(String ns) { <ide> if (ns == NameClass.INHERIT_NS) <ide> haveInherit = true; <add> else <add> noPrefixNs = ns; <ide> } <ide> <ide> private void noteNs(String prefix, String ns) { <ide> Map getPrefixMap() { <ide> if (haveInherit) <ide> prefixMap.remove(""); <add> else if (noPrefixNs != null && !prefixMap.containsKey("")) <add> prefixMap.put("", noPrefixNs); <ide> prefixMap.put("xml", WellKnownNamespaces.XML); <ide> return prefixMap; <ide> }
Java
apache-2.0
ed631987711128a4e1e30f0b07ec581e103e2426
0
Progressive-Learning-Platform/plp-grinder,Progressive-Learning-Platform/plp-grinder,zcmoore/plp-grinder,zcmoore/plp-grinder
public class BasicArithmatic { private static final int constant = 10; public static void main(String[] args) { int a = 4; int b = 3; int sum = a + b; int product = a*b; int difference = a -b; int addImmediate = a + 2; int subImmediate = a - 2; int addConstant = b * constant; addConstant += 2; int multiplyImmediate = b * 0x05; multiplyImmediate << 1; multiplyImmediate++; public int CasualMethod() { return 1; } casualMethod(); public int Sum(int a, int b) { return a + b; } // DO NOT LEX /* Do not lex */ /* * Do not lex 2 */ // /**/ Do not lex 3 /* * * // */ // Strings are currently unsupported // String s = "/*LexAstring" + lex + "LexAstring*/ LexAstring"; } }
sampleData/BasicArithmatic.java
public class BasicArithmatic { private static final int constant = 10; public static void main(String[] args) { int a = 4; int b = 3; int sum = a + b; int product = a*b; int difference = a -b; int addImmediate = a + 2; int subImmediate = a - 2; int addConstant = b * constant; addConstant += 2; int multiplyImmediate = b * 0x05; multiplyImmediate << 1; multiplyImmediate++; // DO NOT LEX /* Do not lex */ /* * Do not lex 2 */ // /**/ Do not lex 3 /* * * // */ // Strings are currently unsupported // String s = "/*LexAstring" + lex + "LexAstring*/ LexAstring"; } }
Token issue on Sum Method
sampleData/BasicArithmatic.java
Token issue on Sum Method
<ide><path>ampleData/BasicArithmatic.java <ide> int multiplyImmediate = b * 0x05; <ide> multiplyImmediate << 1; <ide> multiplyImmediate++; <add> <add> public int CasualMethod() <add> { <add> return 1; <add> } <add> <add> casualMethod(); <add> <add> public int Sum(int a, int b) <add> { <add> return a + b; <add> } <ide> // DO NOT LEX <ide> /* Do not lex */ <ide> /*
Java
mit
8d101163d4662f63aebc7b31548e729056c8b6dd
0
bfaksdl/ivt-lab
package hu.bme.mit.spaceship; /** * A simple spaceship with two proton torpedos and four lasers */ public class GT4500 implements SpaceShip { private TorpedoStore primaryTorpedoStore; private TorpedoStore secondaryTorpedoStore; private boolean wasPrimaryFiredLast = false; public GT4500() { this.primaryTorpedoStore = new TorpedoStore(10); this.secondaryTorpedoStore = new TorpedoStore(10); } public boolean fireLasers(FiringMode firingMode) { // TODO not implemented yet return false; } /** * Tries to fire the torpedo stores of the ship. * * @param firingMode how many torpedo bays to fire * SINGLE: fires only one of the bays. * - For the first time the primary store is fired. * - To give some cooling time to the torpedo stores, torpedo stores are fired alternating. * - But if the store next in line is empty the ship tries to fire the other store. * - If the fired store reports a failure, the ship does not try to fire the other one. * ALL: tries to fire both of the torpedo stores. * * @return whether at least one torpedo was fired successfully */ @Override public boolean fireTorpedos(FiringMode firingMode) { boolean firingSuccess = false; switch (firingMode) { case SINGLE: if (wasPrimaryFiredLast) { // try to fire the secondary first if (! secondaryTorpedoStore.isEmpty()) { firingSuccess = secondaryTorpedoStore.fire(1); wasPrimaryFiredLast = false; } else { // although primary was fired last time, but the secondary is empty // thus try to fire primary again if (! primaryTorpedoStore.isEmpty()) { firingSuccess = primaryTorpedoStore.fire(1); wasPrimaryFiredLast = true; } // if both of the stores are empty, nothing can be done, return failure } } else { // try to fire the primary first if (! primaryTorpedoStore.isEmpty()) { firingSuccess = primaryTorpedoStore.fire(1); wasPrimaryFiredLast = true; } else { // although secondary was fired last time, but primary is empty // thus try to fire secondary again if (! 
secondaryTorpedoStore.isEmpty()) { firingSuccess = secondaryTorpedoStore.fire(1); wasPrimaryFiredLast = false; } // if both of the stores are empty, nothing can be done, return failure } } break; case ALL: // try to fire both of the torpedos //TODO implement feature if (! primaryTorpedoStore.isEmpty()) { firingSuccess = primaryTorpedoStore.fire(1); wasPrimaryFiredLast = true; } // although secondary was fired last time, but primary is empty // thus try to fire secondary again if (! secondaryTorpedoStore.isEmpty()) { firingSuccess = secondaryTorpedoStore.fire(1); wasPrimaryFiredLast = false; } ///// Branch B break; } return firingSuccess; } }
src/main/java/hu/bme/mit/spaceship/GT4500.java
package hu.bme.mit.spaceship; /** * A simple spaceship with two proton torpedos and four lasers */ public class GT4500 implements SpaceShip { private TorpedoStore primaryTorpedoStore; private TorpedoStore secondaryTorpedoStore; private boolean wasPrimaryFiredLast = false; public GT4500() { this.primaryTorpedoStore = new TorpedoStore(10); this.secondaryTorpedoStore = new TorpedoStore(10); } public boolean fireLasers(FiringMode firingMode) { // TODO not implemented yet return false; } /** * Tries to fire the torpedo stores of the ship. * * @param firingMode how many torpedo bays to fire * SINGLE: fires only one of the bays. * - For the first time the primary store is fired. * - To give some cooling time to the torpedo stores, torpedo stores are fired alternating. * - But if the store next in line is empty the ship tries to fire the other store. * - If the fired store reports a failure, the ship does not try to fire the other one. * ALL: tries to fire both of the torpedo stores. * * @return whether at least one torpedo was fired successfully */ @Override public boolean fireTorpedos(FiringMode firingMode) { boolean firingSuccess = false; switch (firingMode) { case SINGLE: if (wasPrimaryFiredLast) { // try to fire the secondary first if (! secondaryTorpedoStore.isEmpty()) { firingSuccess = secondaryTorpedoStore.fire(1); wasPrimaryFiredLast = false; } else { // although primary was fired last time, but the secondary is empty // thus try to fire primary again if (! primaryTorpedoStore.isEmpty()) { firingSuccess = primaryTorpedoStore.fire(1); wasPrimaryFiredLast = true; } // if both of the stores are empty, nothing can be done, return failure } } else { // try to fire the primary first if (! primaryTorpedoStore.isEmpty()) { firingSuccess = primaryTorpedoStore.fire(1); wasPrimaryFiredLast = true; } else { // although secondary was fired last time, but primary is empty // thus try to fire secondary again if (! 
secondaryTorpedoStore.isEmpty()) { firingSuccess = secondaryTorpedoStore.fire(1); wasPrimaryFiredLast = false; } // if both of the stores are empty, nothing can be done, return failure } } break; case ALL: // try to fire both of the torpedos //TODO implement feature if (! primaryTorpedoStore.isEmpty()) { firingSuccess = primaryTorpedoStore.fire(1); wasPrimaryFiredLast = true; } // although secondary was fired last time, but primary is empty // thus try to fire secondary again if (! secondaryTorpedoStore.isEmpty()) { firingSuccess = secondaryTorpedoStore.fire(1); wasPrimaryFiredLast = false; } break; } return firingSuccess; } }
modified: src/main/java/hu/bme/mit/spaceship/GT4500.java
src/main/java/hu/bme/mit/spaceship/GT4500.java
modified: src/main/java/hu/bme/mit/spaceship/GT4500.java
<ide><path>rc/main/java/hu/bme/mit/spaceship/GT4500.java <ide> firingSuccess = secondaryTorpedoStore.fire(1); <ide> wasPrimaryFiredLast = false; <ide> } <add> ///// Branch B <ide> break; <ide> } <ide>
Java
apache-2.0
270bd6e06aa2ecec58492dd588d49a6169f169e4
0
farmerbb/Taskbar,farmerbb/Taskbar
/* Copyright 2019 Braden Farmer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.farmerbb.taskbar.ui; import android.graphics.PixelFormat; import android.view.WindowManager; import com.farmerbb.taskbar.util.U; import java.lang.reflect.Field; public class ViewParams { public int width; public int height; public int gravity; public int flags; public int bottomMargin; public ViewParams(int width, int height, int gravity, int flags, int bottomMargin) { this.width = width; this.height = height; this.gravity = gravity; this.flags = flags; this.bottomMargin = bottomMargin; } public WindowManager.LayoutParams toWindowManagerParams() { final WindowManager.LayoutParams wmParams = new WindowManager.LayoutParams( width, height, U.getOverlayType(), flags, PixelFormat.TRANSLUCENT ); if(gravity > -1) wmParams.gravity = gravity; if(bottomMargin > -1) wmParams.y = bottomMargin; U.allowReflection(); try { Class<?> layoutParamsClass = Class.forName("android.view.WindowManager$LayoutParams"); Field privateFlags = layoutParamsClass.getField("privateFlags"); Field noAnim = layoutParamsClass.getField("PRIVATE_FLAG_NO_MOVE_ANIMATION"); int privateFlagsValue = privateFlags.getInt(wmParams); int noAnimFlag = noAnim.getInt(wmParams); privateFlagsValue |= noAnimFlag; privateFlags.setInt(wmParams, privateFlagsValue); } catch (Exception ignored) {} return wmParams; } public ViewParams updateWidth(int width) { return new ViewParams(width, height, gravity, flags, bottomMargin); } public ViewParams 
updateHeight(int height) { return new ViewParams(width, height, gravity, flags, bottomMargin); } public ViewParams updateBottomMargin(int bottomMargin) { return new ViewParams(width, height, gravity, flags, bottomMargin); } }
app/src/main/java/com/farmerbb/taskbar/ui/ViewParams.java
/* Copyright 2019 Braden Farmer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.farmerbb.taskbar.ui; import android.graphics.PixelFormat; import android.view.WindowManager; import com.farmerbb.taskbar.util.U; public class ViewParams { public int width; public int height; public int gravity; public int flags; public int bottomMargin; public ViewParams(int width, int height, int gravity, int flags, int bottomMargin) { this.width = width; this.height = height; this.gravity = gravity; this.flags = flags; this.bottomMargin = bottomMargin; } public WindowManager.LayoutParams toWindowManagerParams() { final WindowManager.LayoutParams wmParams = new WindowManager.LayoutParams( width, height, U.getOverlayType(), flags, PixelFormat.TRANSLUCENT ); if(gravity > -1) wmParams.gravity = gravity; if(bottomMargin > -1) wmParams.y = bottomMargin; return wmParams; } public ViewParams updateWidth(int width) { return new ViewParams(width, height, gravity, flags, bottomMargin); } public ViewParams updateHeight(int height) { return new ViewParams(width, height, gravity, flags, bottomMargin); } public ViewParams updateBottomMargin(int bottomMargin) { return new ViewParams(width, height, gravity, flags, bottomMargin); } }
Disable WindowManager animations
app/src/main/java/com/farmerbb/taskbar/ui/ViewParams.java
Disable WindowManager animations
<ide><path>pp/src/main/java/com/farmerbb/taskbar/ui/ViewParams.java <ide> import android.view.WindowManager; <ide> <ide> import com.farmerbb.taskbar.util.U; <add> <add>import java.lang.reflect.Field; <ide> <ide> public class ViewParams { <ide> public int width; <ide> if(bottomMargin > -1) <ide> wmParams.y = bottomMargin; <ide> <add> U.allowReflection(); <add> try { <add> Class<?> layoutParamsClass = Class.forName("android.view.WindowManager$LayoutParams"); <add> <add> Field privateFlags = layoutParamsClass.getField("privateFlags"); <add> Field noAnim = layoutParamsClass.getField("PRIVATE_FLAG_NO_MOVE_ANIMATION"); <add> <add> int privateFlagsValue = privateFlags.getInt(wmParams); <add> int noAnimFlag = noAnim.getInt(wmParams); <add> privateFlagsValue |= noAnimFlag; <add> <add> privateFlags.setInt(wmParams, privateFlagsValue); <add> } catch (Exception ignored) {} <add> <ide> return wmParams; <ide> } <ide>
Java
apache-2.0
26a982b579350b26f2f15d42487344c3250e3226
0
reTHINK-project/dev-registry-global
package eu.rethink.globalregistry; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.scheduling.annotation.Scheduled; import eu.rethink.globalregistry.configuration.Config; import eu.rethink.globalregistry.dht.DHTManager; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.daemon.Daemon; import org.apache.commons.daemon.DaemonContext; import org.apache.commons.daemon.DaemonInitException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.SpringApplication; /** * Main class for GlobalRegistry daemon * * @date 06.04.2017 * @version 3 * @author Sebastian Göndör, Parth Singh */ @SpringBootApplication @EnableScheduling public class GlobalRegistryServer implements Daemon { private static Logger LOGGER; public static void main(String[] args) { System.out.println("Initializing GlobalRegistry"); Config config = Config.getInstance(); Options options = new Options(); Option helpOption = Option.builder("h") .longOpt("help") .desc("displays help on cli parameters") .build(); //Option reconnectIntervalOption = Option.builder("r") // .longOpt("reconnect_interval") // .desc("sets the reconnect interval in hours. 
0 disables reconnect [" + config.getReconnectInterval() + "]") // .hasArg() // .build(); Option portRESTOption = Option.builder("p") .longOpt("port_rest") .desc("sets the port for the REST interface [" + config.getPortREST() + "]") .hasArg() .build(); Option networkInterfaceOption = Option.builder("n") .longOpt("network_interface") .desc("sets the network interface [" + config.getNetworkInterface() + "]") .hasArg() .build(); Option logPathOption = Option.builder("l") .longOpt("log_path") .desc("sets the directory for the log files [" + config.getLogPath() + "]") .hasArg() .build(); Option connectNodeOption = Option.builder("c") .longOpt("connect_node") .desc("sets the GReg node to connect to [" + config.getConnectNode() + "]") .hasArg() .build(); //options.addOption(reconnectIntervalOption); options.addOption(helpOption); options.addOption(portRESTOption); options.addOption(networkInterfaceOption); options.addOption(logPathOption); options.addOption(connectNodeOption); // parse command line parameters CommandLineParser parser = new DefaultParser(); try { CommandLine cmd = parser.parse(options, args); if(cmd.hasOption("h")) { HelpFormatter formater = new HelpFormatter(); formater.printHelp("GReg help", options); System.exit(0); } //if(cmd.hasOption("r")) //{ // config.setReconnectInterval(Integer.parseInt(cmd.getOptionValue("r"))); // TODO check for valid values //} if(cmd.hasOption("p")) { config.setPortREST(Integer.parseInt(cmd.getOptionValue("p"))); // TODO check for valid values } if(cmd.hasOption("n")) { config.setNetworkInterface(cmd.getOptionValue("n")); // TODO check for valid values } if(cmd.hasOption("l")) { config.setLogPath(cmd.getOptionValue("l")); // TODO check for valid values } if(cmd.hasOption("c")) { config.setConnectNode(cmd.getOptionValue("c")); // TODO check for valid values } System.out.println("-----Configuration: "); //System.out.println("reconnectInterval: " + config.getReconnectInterval()); System.out.println("connectNode: " + 
config.getConnectNode()); System.out.println("portREST: " + config.getPortREST()); System.out.println("networkInterface: " + config.getNetworkInterface()); System.out.println("logPath: " + config.getLogPath() + "\n-----"); // setup logging System.setProperty("loginfofile", config.getLogPath() + "log-info.log"); System.setProperty("logdebugfile", config.getLogPath() + "log-debug.log"); LOGGER = LoggerFactory.getLogger(GlobalRegistryServer.class); LOGGER.info(config.getProductName() + " " + config.getVersionName() + " " + config.getVersionCode()); LOGGER.info("Build #" + config.getVersionNumber() + " (" + config.getVersionDate() + ")\n"); } catch (ParseException e) { System.out.println("Wrong parameter. Error: " + e.getMessage()); } try { LOGGER.info("initializing DHT... "); DHTManager.getInstance().initDHT(); LOGGER.info("DHT initialized successfully"); LOGGER.info("initializing Global Registry server... "); // Registering the port for the REST interface to listen on System.getProperties().put("server.port", config.getPortREST()); LOGGER.info("REST interface listening on " + config.getPortREST()); SpringApplication.run(GlobalRegistryServer.class, args); } catch (Exception e) { LOGGER.info("failed!"); e.printStackTrace(); } } // 5 minute delay, then every 12 hours //@Scheduled(initialDelay=5 * 1000, fixedRate=30 * 1000) @Scheduled(initialDelay=2 * 60 * 1000, fixedRate=1 * 60 * 60 * 1000) protected void reconnect() { LOGGER.info("running reconnect functionality..."); try { DHTManager.getInstance().connectToConnectNode(); } catch (Exception e) { LOGGER.info("reconnect functionality failed!"); e.printStackTrace(); } } @Override public void init(DaemonContext daemonContext) throws DaemonInitException, Exception { //System.out.println("deamon: init()"); String arguments[] = daemonContext.getArguments(); System.out.println(arguments); GlobalRegistryServer.main(arguments); } @Override public void start() throws Exception { //System.out.println("deamon: start()"); } 
// Daemon lifecycle: stop. No explicit shutdown work is performed here.
@Override
public void stop() throws Exception
{
	// NOTE(review): the commented-out trace says "exception()" but this is stop() — likely a copy-paste slip in the trace text
	//System.out.println("deamon: exception()");
}

// Daemon lifecycle: destroy. No resources to release.
@Override
public void destroy()
{
	//System.out.println("deamon: destroy()");
}
}
src/main/java/eu/rethink/globalregistry/GlobalRegistryServer.java
package eu.rethink.globalregistry;

import java.util.Arrays;

import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;

import eu.rethink.globalregistry.configuration.Config;
import eu.rethink.globalregistry.dht.DHTManager;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.daemon.Daemon;
import org.apache.commons.daemon.DaemonContext;
import org.apache.commons.daemon.DaemonInitException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;

/**
 * Main class for the GlobalRegistry daemon.
 *
 * Parses the command line into the {@link Config} singleton, configures
 * logging, joins the DHT and starts the Spring Boot REST interface. Also
 * implements the Apache Commons Daemon lifecycle so the server can run as a
 * system service.
 *
 * @date 03.04.2017
 * @version 3
 * @author Sebastian Göndör, Parth Singh
 */
@SpringBootApplication
@EnableScheduling
public class GlobalRegistryServer implements Daemon
{
	// Initialized in main() only after the log path is known (the logger
	// configuration reads the loginfofile/logdebugfile system properties).
	private static Logger LOGGER;

	/**
	 * Entry point: parses CLI options, sets up logging, initializes the DHT
	 * and launches the REST interface.
	 *
	 * @param args command line arguments (see --help)
	 */
	public static void main(String[] args)
	{
		System.out.println("Initializing GlobalRegistry");

		Config config = Config.getInstance();

		Options options = new Options();

		Option helpOption = Option.builder("h")
				.longOpt("help")
				.desc("displays help on cli parameters")
				.build();

		//Option reconnectIntervalOption = Option.builder("r")
		//		.longOpt("reconnect_interval")
		//		.desc("sets the reconnect interval in hours. 0 disables reconnect [" + config.getReconnectInterval() + "]")
		//		.hasArg()
		//		.build();

		Option portRESTOption = Option.builder("p")
				.longOpt("port_rest")
				.desc("sets the port for the REST interface [" + config.getPortREST() + "]")
				.hasArg()
				.build();

		Option networkInterfaceOption = Option.builder("n")
				.longOpt("network_interface")
				.desc("sets the network interface [" + config.getNetworkInterface() + "]")
				.hasArg()
				.build();

		Option logPathOption = Option.builder("l")
				.longOpt("log_path")
				.desc("sets the directory for the log files [" + config.getLogPath() + "]")
				.hasArg()
				.build();

		Option connectNodeOption = Option.builder("c")
				.longOpt("connect_node")
				.desc("sets the GReg node to connect to [" + config.getConnectNode() + "]")
				.hasArg()
				.build();

		//options.addOption(reconnectIntervalOption);
		options.addOption(helpOption);
		options.addOption(portRESTOption);
		options.addOption(networkInterfaceOption);
		options.addOption(logPathOption);
		options.addOption(connectNodeOption);

		// parse command line parameters
		CommandLineParser parser = new DefaultParser();

		try
		{
			CommandLine cmd = parser.parse(options, args);

			if(cmd.hasOption("h"))
			{
				HelpFormatter formatter = new HelpFormatter();
				formatter.printHelp("GReg help", options);
				System.exit(0);
			}

			//if(cmd.hasOption("r"))
			//{
			//	config.setReconnectInterval(Integer.parseInt(cmd.getOptionValue("r"))); // TODO check for valid values
			//}
			if(cmd.hasOption("p"))
			{
				config.setPortREST(Integer.parseInt(cmd.getOptionValue("p"))); // TODO check for valid values
			}
			if(cmd.hasOption("n"))
			{
				config.setNetworkInterface(cmd.getOptionValue("n")); // TODO check for valid values
			}
			if(cmd.hasOption("l"))
			{
				config.setLogPath(cmd.getOptionValue("l")); // TODO check for valid values
			}
			if(cmd.hasOption("c"))
			{
				config.setConnectNode(cmd.getOptionValue("c")); // TODO check for valid values
			}

			System.out.println("-----Configuration: ");
			//System.out.println("reconnectInterval: " + config.getReconnectInterval());
			System.out.println("connectNode: " + config.getConnectNode());
			System.out.println("portREST: " + config.getPortREST());
			System.out.println("networkInterface: " + config.getNetworkInterface());
			System.out.println("logPath: " + config.getLogPath() + "\n-----");

			// setup logging: the log file locations must be published as
			// system properties before the logger is created
			System.setProperty("loginfofile", config.getLogPath() + "log-info.log");
			System.setProperty("logdebugfile", config.getLogPath() + "log-debug.log");

			LOGGER = LoggerFactory.getLogger(GlobalRegistryServer.class);
			LOGGER.info(config.getProductName() + " " + config.getVersionName() + " " + config.getVersionCode());
			LOGGER.info("Build #" + config.getVersionNumber() + " (" + config.getVersionDate() + ")\n");
		}
		catch (ParseException e)
		{
			System.out.println("Wrong parameter. Error: " + e.getMessage());
			// BUGFIX: previously execution fell through with LOGGER still
			// null, causing a NullPointerException in the block below;
			// terminate on invalid arguments instead
			System.exit(1);
		}

		try
		{
			LOGGER.info("initializing DHT... ");
			DHTManager.getInstance().initDHT();
			LOGGER.info("DHT initialized successfully");

			LOGGER.info("initializing Global Registry server... ");
			// Registering the port for the REST interface to listen on.
			// BUGFIX: system property values must be Strings; previously an
			// Integer was stored via Properties.put(), which System.getProperty
			// cannot retrieve
			System.setProperty("server.port", String.valueOf(config.getPortREST()));
			LOGGER.info("REST interface listening on " + config.getPortREST());
			SpringApplication.run(GlobalRegistryServer.class, args);
		}
		catch (Exception e)
		{
			LOGGER.info("failed!");
			e.printStackTrace();
		}
	}

	// 5 minute delay, then every 12 hours
	//@Scheduled(initialDelay=5 * 1000, fixedRate=30 * 1000)
	@Scheduled(initialDelay=5 * 60 * 1000, fixedRate=12 * 60 * 60 * 1000)
	protected void reconnect()
	{
		try
		{
			LOGGER.info("running reconnect functionality...");
			DHTManager.getInstance().connectToConnectNode();
		}
		catch (Exception e)
		{
			LOGGER.info("reconnect functionality failed!");
			e.printStackTrace();
		}
	}

	/**
	 * Daemon lifecycle: initialization. Delegates to main() with the
	 * arguments supplied by the daemon context.
	 */
	@Override
	public void init(DaemonContext daemonContext) throws DaemonInitException, Exception
	{
		//System.out.println("deamon: init()");
		String[] arguments = daemonContext.getArguments();
		// BUGFIX: print the argument values, not the array reference
		System.out.println(Arrays.toString(arguments));
		GlobalRegistryServer.main(arguments);
	}

	/** Daemon lifecycle: start. All work already happens in init()/main(). */
	@Override
	public void start() throws Exception
	{
		//System.out.println("deamon: start()");
	}

	/** Daemon lifecycle: stop. No explicit shutdown work is performed. */
	@Override
	public void stop() throws Exception
	{
		//System.out.println("deamon: exception()");
	}

	/** Daemon lifecycle: destroy. No resources to release. */
	@Override
	public void destroy()
	{
		//System.out.println("deamon: destroy()");
	}
}
finalized reconnect functionality
src/main/java/eu/rethink/globalregistry/GlobalRegistryServer.java
finalized reconnect functionality
<ide><path>rc/main/java/eu/rethink/globalregistry/GlobalRegistryServer.java <ide> /** <ide> * Main class for GlobalRegistry daemon <ide> * <del> * @date 03.04.2017 <add> * @date 06.04.2017 <ide> * @version 3 <ide> * @author Sebastian Göndör, Parth Singh <ide> */ <ide> <ide> // 5 minute delay, then every 12 hours <ide> //@Scheduled(initialDelay=5 * 1000, fixedRate=30 * 1000) <del> @Scheduled(initialDelay=5 * 60 * 1000, fixedRate=12 * 60 * 60 * 1000) <add> @Scheduled(initialDelay=2 * 60 * 1000, fixedRate=1 * 60 * 60 * 1000) <ide> protected void reconnect() <ide> { <add> LOGGER.info("running reconnect functionality..."); <add> <ide> try <ide> { <del> LOGGER.info("running reconnect functionality..."); <ide> DHTManager.getInstance().connectToConnectNode(); <ide> } <ide> catch (Exception e)
Java
cc0-1.0
edee1152330a4740341ac120b18669824340c92c
0
MeasureAuthoringTool/clinical_quality_language,MeasureAuthoringTool/clinical_quality_language,cqframework/clinical_quality_language,MeasureAuthoringTool/clinical_quality_language,MeasureAuthoringTool/clinical_quality_language,cqframework/clinical_quality_language
package org.cqframework.cql.tools.formatter; import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.tree.*; import org.cqframework.cql.gen.cqlBaseVisitor; import org.cqframework.cql.gen.cqlLexer; import org.cqframework.cql.gen.cqlParser; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.List; import java.util.Stack; import java.util.stream.Collectors; /** * Created by Bryn on 7/5/2017. */ public class CqlFormatterVisitor extends cqlBaseVisitor { private static List<CommentToken> comments = new ArrayList<>(); public static FormatResult getFormattedOutput(InputStream is) throws IOException { ANTLRInputStream in = new ANTLRInputStream(is); cqlLexer lexer = new cqlLexer(in); CommonTokenStream tokens = new CommonTokenStream(lexer); tokens.fill(); populateComments(tokens); cqlParser parser = new cqlParser(tokens); parser.addErrorListener(new SyntaxErrorListener()); parser.setBuildParseTree(true); ParserRuleContext tree = parser.library(); if (((SyntaxErrorListener) parser.getErrorListeners().get(1)).errors.size() > 0) { return new FormatResult(((SyntaxErrorListener) parser.getErrorListeners().get(1)).errors, in.toString()); } CqlFormatterVisitor formatter = new CqlFormatterVisitor(); String output = (String)formatter.visit(tree); if (comments.size() > 0) { StringBuilder eofComments = new StringBuilder(); for (CommentToken comment : comments) { eofComments.append(comment.whitespaceBefore).append(comment.token.getText()); } comments.clear(); output += eofComments.toString(); } return new FormatResult(new ArrayList<>(), output); } public static String getInputStreamAsString(InputStream is) { return new BufferedReader(new InputStreamReader(is)).lines().collect(Collectors.joining("\n")); } public static void populateComments(CommonTokenStream tokens) { for (Token token : tokens.getTokens()) { if (token.getText().startsWith("//") || 
token.getText().startsWith("/*")) { String whitespace = token.getTokenIndex() < 1 ? "" : tokens.get(token.getTokenIndex() - 1).getText(); comments.add(new CommentToken(token, whitespace)); } } } private StringBuilder output; private final char space = '\u0020'; private final char tab = '\t'; private final String newLine = "\r\n"; private int currentLine = 0; private boolean onNewLine; private boolean needsWhitespace; private int indentLevel = 0; private boolean isFirstTupleElement = false; private String currentSection; private int sectionCount = 0; private void newSection(String section) { if (hasSectionContent()) { resetIndentLevel(); newLine(); } sectionCount = 0; currentSection = section; } private boolean needsSectionSeparator(String section) { switch (section) { case "statement": return true; default: return false; } } private void ensureSectionSeparator() { if (needsSectionSeparator(currentSection) && hasSectionContent()) { resetIndentLevel(); newLine(); } } private void addToSection(String section) { if (!section.equals(currentSection)) { newSection(section); } ensureSectionSeparator(); sectionCount++; } private boolean hasSectionContent() { return sectionCount > 0; } private int typeSpecifierLevel = 0; private void enterTypeSpecifier() { typeSpecifierLevel++; } private void exitTypeSpecifier() { typeSpecifierLevel--; } private boolean inTypeSpecifier() { return typeSpecifierLevel > 0; } private int functionDefinitionLevel = 0; private void enterFunctionDefinition() { functionDefinitionLevel++; } private void exitFunctionDefinition() { functionDefinitionLevel--; } private boolean inFunctionDefinition() { return functionDefinitionLevel > 0; } private int functionInvocationLevel = 0; private void enterFunctionInvocation() { functionInvocationLevel++; } private void exitFunctionInvocation() { functionInvocationLevel--; } private boolean inFunctionInvocation() { return functionInvocationLevel > 0; } private int retrieveLevel = 0; private void enterRetrieve() { 
retrieveLevel++; } private void exitRetrieve() { retrieveLevel--; } private boolean inRetrieve() { return retrieveLevel > 0; } private void enterClause() { increaseIndentLevel(); newLine(); } private void exitClause() { decreaseIndentLevel(); } private Stack<Integer> groups; private void enterGroup() { increaseIndentLevel(); groups.push(currentLine); } private void exitGroup() { Integer groupStartLine = groups.pop(); decreaseIndentLevel(); if (currentLine != groupStartLine) { newLine(); } } private boolean needsWhitespaceBefore(String terminal) { if (terminal.trim().isEmpty() || terminal.startsWith("//") || terminal.startsWith("/*")) { return false; } switch (terminal) { case ":": return false; case ".": return false; case ",": return false; case "<": return !inTypeSpecifier(); case ">": return !inTypeSpecifier(); case "(": return !inFunctionDefinition() && !inFunctionInvocation(); case ")": return !inFunctionDefinition() && !inFunctionInvocation(); case "[": return inRetrieve(); case "]": return false; default: return true; } } private boolean needsWhitespaceAfter(String terminal) { switch (terminal) { case ".": return false; case "<": return !inTypeSpecifier(); case ">": return !inTypeSpecifier(); case "(": return !inFunctionDefinition() && !inFunctionInvocation(); case ")": return !inFunctionDefinition() && !inFunctionInvocation(); case "[": return false; case "]": return inRetrieve(); default: return true; } } private void appendComment(CommentToken token) { // get the whitespace at the end of output String out = output.toString(); String whitespace = out.substring(out.replaceAll("\\s+$", "").length()); if (!whitespace.equals(token.whitespaceBefore)) { String whitespaceBefore = token.whitespaceBefore; output = new StringBuilder() .append(out.substring(0, out.length() - whitespace.length())) .append(whitespaceBefore); } output.append(token.token.getText()).append(whitespace); } private void appendTerminal(String terminal) { if (needsWhitespaceBefore(terminal)) { 
ensureWhitespace(); } output.append(terminal); onNewLine = false; needsWhitespace = needsWhitespaceAfter(terminal); } private void increaseIndentLevel() { indentLevel++; } private void decreaseIndentLevel() { indentLevel--; } private void resetIndentLevel() { indentLevel = 0; } private void newLine() { output.append(newLine); currentLine++; for (int i = 0; i < indentLevel; i++) { output.append(tab); } onNewLine = true; } private void newConstruct(String section) { resetIndentLevel(); newLine(); addToSection(section); } private void ensureWhitespace() { if (!onNewLine && needsWhitespace) { output.append(space); } } private void reset() { resetIndentLevel(); currentLine = 1; onNewLine = true; output = new StringBuilder(); groups = new Stack<>(); } @Override public Object visitLibrary(cqlParser.LibraryContext ctx) { reset(); super.visitLibrary(ctx); resetIndentLevel(); newLine(); return output.toString(); } @Override public Object visitChildren(RuleNode node) { Object result = defaultResult(); int n = node.getChildCount(); for (int i=0; i<n; i++) { if (!shouldVisitNextChild(node, result)) { break; } ParseTree c = node.getChild(i); if ((node instanceof cqlParser.TupleSelectorContext || node instanceof cqlParser.TupleTypeSpecifierContext) && c instanceof TerminalNodeImpl) { if (((TerminalNodeImpl) c).getSymbol().getText().equals("}")) { decreaseIndentLevel(); newLine(); } } Object childResult = c.accept(this); result = aggregateResult(result, childResult); } return result; } @Override public Object visitLibraryDefinition(cqlParser.LibraryDefinitionContext ctx) { addToSection("library"); return super.visitLibraryDefinition(ctx); } @Override public Object visitUsingDefinition(cqlParser.UsingDefinitionContext ctx) { newConstruct("using"); return super.visitUsingDefinition(ctx); } @Override public Object visitIncludeDefinition(cqlParser.IncludeDefinitionContext ctx) { newConstruct("include"); return super.visitIncludeDefinition(ctx); } @Override public Object 
visitLocalIdentifier(cqlParser.LocalIdentifierContext ctx) { return super.visitLocalIdentifier(ctx); } @Override public Object visitAccessModifier(cqlParser.AccessModifierContext ctx) { return super.visitAccessModifier(ctx); } @Override public Object visitParameterDefinition(cqlParser.ParameterDefinitionContext ctx) { newConstruct("parameter"); return super.visitParameterDefinition(ctx); } @Override public Object visitCodesystemDefinition(cqlParser.CodesystemDefinitionContext ctx) { newConstruct("codesystem"); return super.visitCodesystemDefinition(ctx); } @Override public Object visitValuesetDefinition(cqlParser.ValuesetDefinitionContext ctx) { newConstruct("valueset"); return super.visitValuesetDefinition(ctx); } @Override public Object visitCodesystems(cqlParser.CodesystemsContext ctx) { return super.visitCodesystems(ctx); } @Override public Object visitCodesystemIdentifier(cqlParser.CodesystemIdentifierContext ctx) { return super.visitCodesystemIdentifier(ctx); } @Override public Object visitLibraryIdentifier(cqlParser.LibraryIdentifierContext ctx) { return super.visitLibraryIdentifier(ctx); } @Override public Object visitCodeDefinition(cqlParser.CodeDefinitionContext ctx) { newConstruct("code"); return super.visitCodeDefinition(ctx); } @Override public Object visitConceptDefinition(cqlParser.ConceptDefinitionContext ctx) { newConstruct("concept"); return super.visitConceptDefinition(ctx); } @Override public Object visitCodeIdentifier(cqlParser.CodeIdentifierContext ctx) { return super.visitCodeIdentifier(ctx); } @Override public Object visitCodesystemId(cqlParser.CodesystemIdContext ctx) { return super.visitCodesystemId(ctx); } @Override public Object visitValuesetId(cqlParser.ValuesetIdContext ctx) { return super.visitValuesetId(ctx); } @Override public Object visitVersionSpecifier(cqlParser.VersionSpecifierContext ctx) { return super.visitVersionSpecifier(ctx); } @Override public Object visitCodeId(cqlParser.CodeIdContext ctx) { return 
super.visitCodeId(ctx); } @Override public Object visitTypeSpecifier(cqlParser.TypeSpecifierContext ctx) { enterTypeSpecifier(); try { return super.visitTypeSpecifier(ctx); } finally { exitTypeSpecifier(); } } @Override public Object visitNamedTypeSpecifier(cqlParser.NamedTypeSpecifierContext ctx) { return super.visitNamedTypeSpecifier(ctx); } @Override public Object visitModelIdentifier(cqlParser.ModelIdentifierContext ctx) { return super.visitModelIdentifier(ctx); } @Override public Object visitListTypeSpecifier(cqlParser.ListTypeSpecifierContext ctx) { return super.visitListTypeSpecifier(ctx); } @Override public Object visitIntervalTypeSpecifier(cqlParser.IntervalTypeSpecifierContext ctx) { return super.visitIntervalTypeSpecifier(ctx); } @Override public Object visitTupleTypeSpecifier(cqlParser.TupleTypeSpecifierContext ctx) { isFirstTupleElement = true; return super.visitTupleTypeSpecifier(ctx); } @Override public Object visitTupleElementDefinition(cqlParser.TupleElementDefinitionContext ctx) { if (isFirstTupleElement) { increaseIndentLevel(); isFirstTupleElement = false; } newLine(); return super.visitTupleElementDefinition(ctx); } @Override public Object visitChoiceTypeSpecifier(cqlParser.ChoiceTypeSpecifierContext ctx) { return super.visitChoiceTypeSpecifier(ctx); } @Override public Object visitStatement(cqlParser.StatementContext ctx) { return super.visitStatement(ctx); } @Override public Object visitExpressionDefinition(cqlParser.ExpressionDefinitionContext ctx) { newConstruct("statement"); Object result = defaultResult(); int n = ctx.getChildCount(); for (int i=0; i<n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (c == ctx.expression()) { enterClause(); } try { Object childResult = c.accept(this); result = aggregateResult(result, childResult); } finally { if (c == ctx.expression()) { exitClause(); } } } return result; } @Override public Object visitContextDefinition(cqlParser.ContextDefinitionContext ctx) { 
newConstruct("statement"); return super.visitContextDefinition(ctx); } @Override public Object visitFunctionDefinition(cqlParser.FunctionDefinitionContext ctx) { newConstruct("statement"); Object result = defaultResult(); int n = ctx.getChildCount(); boolean clauseEntered = false; try { for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (c.getText().equals("(")) { enterFunctionDefinition(); } Object childResult = c.accept(this); result = aggregateResult(result, childResult); if (c.getText().equals(")")) { exitFunctionDefinition(); } if (c.getText().equals(":")) { enterClause(); clauseEntered = true; } } } finally { if (clauseEntered) { exitClause(); } } return result; } @Override public Object visitOperandDefinition(cqlParser.OperandDefinitionContext ctx) { return super.visitOperandDefinition(ctx); } @Override public Object visitFunctionBody(cqlParser.FunctionBodyContext ctx) { return super.visitFunctionBody(ctx); } @Override public Object visitQuerySource(cqlParser.QuerySourceContext ctx) { return super.visitQuerySource(ctx); } @Override public Object visitAliasedQuerySource(cqlParser.AliasedQuerySourceContext ctx) { return super.visitAliasedQuerySource(ctx); } @Override public Object visitAlias(cqlParser.AliasContext ctx) { return super.visitAlias(ctx); } @Override public Object visitQueryInclusionClause(cqlParser.QueryInclusionClauseContext ctx) { enterClause(); try { return super.visitQueryInclusionClause(ctx); } finally { exitClause(); } } private Object visitWithOrWithoutClause(ParserRuleContext ctx) { Object result = defaultResult(); int n = ctx.getChildCount(); boolean clauseEntered = false; try { for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (c.getText().equals("such that")) { enterClause(); clauseEntered = true; } Object childResult = c.accept(this); result = aggregateResult(result, childResult); } } finally { if 
(clauseEntered) { exitClause(); } } return result; } @Override public Object visitWithClause(cqlParser.WithClauseContext ctx) { return visitWithOrWithoutClause(ctx); } @Override public Object visitWithoutClause(cqlParser.WithoutClauseContext ctx) { return visitWithOrWithoutClause(ctx); } @Override public Object visitRetrieve(cqlParser.RetrieveContext ctx) { enterRetrieve(); try { return super.visitRetrieve(ctx); } finally { exitRetrieve(); } } @Override public Object visitCodePath(cqlParser.CodePathContext ctx) { return super.visitCodePath(ctx); } @Override public Object visitTerminology(cqlParser.TerminologyContext ctx) { return super.visitTerminology(ctx); } @Override public Object visitQualifier(cqlParser.QualifierContext ctx) { return super.visitQualifier(ctx); } @Override public Object visitQuery(cqlParser.QueryContext ctx) { return super.visitQuery(ctx); } @Override public Object visitSourceClause(cqlParser.SourceClauseContext ctx) { return super.visitSourceClause(ctx); } @Override public Object visitSingleSourceClause(cqlParser.SingleSourceClauseContext ctx) { return super.visitSingleSourceClause(ctx); } @Override public Object visitMultipleSourceClause(cqlParser.MultipleSourceClauseContext ctx) { Object result = defaultResult(); int n = ctx.getChildCount(); boolean clauseEntered = false; try { for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (i == 1) { enterClause(); clauseEntered = true; } if (i > 1 && !c.getText().equals(",")) { newLine(); } Object childResult = c.accept(this); result = aggregateResult(result, childResult); } return result; } finally { if (clauseEntered) { exitClause(); } } } @Override public Object visitLetClause(cqlParser.LetClauseContext ctx) { enterClause(); try { Object result = defaultResult(); int n = ctx.getChildCount(); for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (i > 1 && 
!c.getText().equals(",")) { newLine(); } Object childResult = c.accept(this); result = aggregateResult(result, childResult); } return result; } finally { exitClause(); } } @Override public Object visitLetClauseItem(cqlParser.LetClauseItemContext ctx) { return super.visitLetClauseItem(ctx); } @Override public Object visitWhereClause(cqlParser.WhereClauseContext ctx) { enterClause(); try { return super.visitWhereClause(ctx); } finally { exitClause(); } } @Override public Object visitReturnClause(cqlParser.ReturnClauseContext ctx) { enterClause(); try { return super.visitReturnClause(ctx); } finally { exitClause(); } } @Override public Object visitSortClause(cqlParser.SortClauseContext ctx) { enterClause(); try { return super.visitSortClause(ctx); } finally { exitClause(); } } @Override public Object visitSortDirection(cqlParser.SortDirectionContext ctx) { return super.visitSortDirection(ctx); } @Override public Object visitSortByItem(cqlParser.SortByItemContext ctx) { return super.visitSortByItem(ctx); } @Override public Object visitQualifiedIdentifier(cqlParser.QualifiedIdentifierContext ctx) { return super.visitQualifiedIdentifier(ctx); } @Override public Object visitDurationBetweenExpression(cqlParser.DurationBetweenExpressionContext ctx) { return super.visitDurationBetweenExpression(ctx); } @Override public Object visitInFixSetExpression(cqlParser.InFixSetExpressionContext ctx) { return visitBinaryClausedExpression(ctx); } @Override public Object visitRetrieveExpression(cqlParser.RetrieveExpressionContext ctx) { return super.visitRetrieveExpression(ctx); } @Override public Object visitTimingExpression(cqlParser.TimingExpressionContext ctx) { return super.visitTimingExpression(ctx); } @Override public Object visitNotExpression(cqlParser.NotExpressionContext ctx) { return super.visitNotExpression(ctx); } @Override public Object visitQueryExpression(cqlParser.QueryExpressionContext ctx) { return super.visitQueryExpression(ctx); } @Override public Object 
visitBooleanExpression(cqlParser.BooleanExpressionContext ctx) { return super.visitBooleanExpression(ctx); } @Override public Object visitOrExpression(cqlParser.OrExpressionContext ctx) { return visitBinaryClausedExpression(ctx); } @Override public Object visitCastExpression(cqlParser.CastExpressionContext ctx) { return super.visitCastExpression(ctx); } private Object visitBinaryClausedExpression(ParserRuleContext ctx) { Object result = defaultResult(); int n = ctx.getChildCount(); boolean clauseEntered = false; try { for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (i == 1) { enterClause(); clauseEntered = true; } Object childResult = c.accept(this); result = aggregateResult(result, childResult); } return result; } finally { if (clauseEntered) { exitClause(); } } } @Override public Object visitAndExpression(cqlParser.AndExpressionContext ctx) { return visitBinaryClausedExpression(ctx); } @Override public Object visitBetweenExpression(cqlParser.BetweenExpressionContext ctx) { return super.visitBetweenExpression(ctx); } @Override public Object visitMembershipExpression(cqlParser.MembershipExpressionContext ctx) { return super.visitMembershipExpression(ctx); } @Override public Object visitDifferenceBetweenExpression(cqlParser.DifferenceBetweenExpressionContext ctx) { return super.visitDifferenceBetweenExpression(ctx); } @Override public Object visitInequalityExpression(cqlParser.InequalityExpressionContext ctx) { return super.visitInequalityExpression(ctx); } @Override public Object visitEqualityExpression(cqlParser.EqualityExpressionContext ctx) { return super.visitEqualityExpression(ctx); } @Override public Object visitExistenceExpression(cqlParser.ExistenceExpressionContext ctx) { return super.visitExistenceExpression(ctx); } @Override public Object visitImpliesExpression(cqlParser.ImpliesExpressionContext ctx) { return super.visitImpliesExpression(ctx); } @Override public Object 
visitTermExpression(cqlParser.TermExpressionContext ctx) { return super.visitTermExpression(ctx); } @Override public Object visitTypeExpression(cqlParser.TypeExpressionContext ctx) { return super.visitTypeExpression(ctx); } @Override public Object visitDateTimePrecision(cqlParser.DateTimePrecisionContext ctx) { return super.visitDateTimePrecision(ctx); } @Override public Object visitDateTimeComponent(cqlParser.DateTimeComponentContext ctx) { return super.visitDateTimeComponent(ctx); } @Override public Object visitPluralDateTimePrecision(cqlParser.PluralDateTimePrecisionContext ctx) { return super.visitPluralDateTimePrecision(ctx); } @Override public Object visitAdditionExpressionTerm(cqlParser.AdditionExpressionTermContext ctx) { return super.visitAdditionExpressionTerm(ctx); } @Override public Object visitIndexedExpressionTerm(cqlParser.IndexedExpressionTermContext ctx) { return super.visitIndexedExpressionTerm(ctx); } @Override public Object visitWidthExpressionTerm(cqlParser.WidthExpressionTermContext ctx) { return super.visitWidthExpressionTerm(ctx); } @Override public Object visitTimeUnitExpressionTerm(cqlParser.TimeUnitExpressionTermContext ctx) { return super.visitTimeUnitExpressionTerm(ctx); } @Override public Object visitIfThenElseExpressionTerm(cqlParser.IfThenElseExpressionTermContext ctx) { return super.visitIfThenElseExpressionTerm(ctx); } @Override public Object visitTimeBoundaryExpressionTerm(cqlParser.TimeBoundaryExpressionTermContext ctx) { return super.visitTimeBoundaryExpressionTerm(ctx); } @Override public Object visitElementExtractorExpressionTerm(cqlParser.ElementExtractorExpressionTermContext ctx) { return super.visitElementExtractorExpressionTerm(ctx); } @Override public Object visitConversionExpressionTerm(cqlParser.ConversionExpressionTermContext ctx) { return super.visitConversionExpressionTerm(ctx); } @Override public Object visitTypeExtentExpressionTerm(cqlParser.TypeExtentExpressionTermContext ctx) { return 
super.visitTypeExtentExpressionTerm(ctx); } @Override public Object visitPredecessorExpressionTerm(cqlParser.PredecessorExpressionTermContext ctx) { return super.visitPredecessorExpressionTerm(ctx); } @Override public Object visitPointExtractorExpressionTerm(cqlParser.PointExtractorExpressionTermContext ctx) { return super.visitPointExtractorExpressionTerm(ctx); } @Override public Object visitMultiplicationExpressionTerm(cqlParser.MultiplicationExpressionTermContext ctx) { return super.visitMultiplicationExpressionTerm(ctx); } @Override public Object visitAggregateExpressionTerm(cqlParser.AggregateExpressionTermContext ctx) { return super.visitAggregateExpressionTerm(ctx); } @Override public Object visitDurationExpressionTerm(cqlParser.DurationExpressionTermContext ctx) { return super.visitDurationExpressionTerm(ctx); } @Override public Object visitCaseExpressionTerm(cqlParser.CaseExpressionTermContext ctx) { return super.visitCaseExpressionTerm(ctx); } @Override public Object visitPowerExpressionTerm(cqlParser.PowerExpressionTermContext ctx) { return super.visitPowerExpressionTerm(ctx); } @Override public Object visitSuccessorExpressionTerm(cqlParser.SuccessorExpressionTermContext ctx) { return super.visitSuccessorExpressionTerm(ctx); } @Override public Object visitPolarityExpressionTerm(cqlParser.PolarityExpressionTermContext ctx) { return super.visitPolarityExpressionTerm(ctx); } @Override public Object visitTermExpressionTerm(cqlParser.TermExpressionTermContext ctx) { return super.visitTermExpressionTerm(ctx); } @Override public Object visitInvocationExpressionTerm(cqlParser.InvocationExpressionTermContext ctx) { return super.visitInvocationExpressionTerm(ctx); } @Override public Object visitCaseExpressionItem(cqlParser.CaseExpressionItemContext ctx) { return super.visitCaseExpressionItem(ctx); } @Override public Object visitDateTimePrecisionSpecifier(cqlParser.DateTimePrecisionSpecifierContext ctx) { return super.visitDateTimePrecisionSpecifier(ctx); } 
// Pass-through overrides for qualifiers, interval operator phrases and
// selector terms; all delegate to the generated base visitor unchanged.
@Override public Object visitRelativeQualifier(cqlParser.RelativeQualifierContext ctx) { return super.visitRelativeQualifier(ctx); } @Override public Object visitOffsetRelativeQualifier(cqlParser.OffsetRelativeQualifierContext ctx) { return super.visitOffsetRelativeQualifier(ctx); } @Override public Object visitExclusiveRelativeQualifier(cqlParser.ExclusiveRelativeQualifierContext ctx) { return super.visitExclusiveRelativeQualifier(ctx); } @Override public Object visitQuantityOffset(cqlParser.QuantityOffsetContext ctx) { return super.visitQuantityOffset(ctx); } @Override public Object visitTemporalRelationship(cqlParser.TemporalRelationshipContext ctx) { return super.visitTemporalRelationship(ctx); } @Override public Object visitConcurrentWithIntervalOperatorPhrase(cqlParser.ConcurrentWithIntervalOperatorPhraseContext ctx) { return super.visitConcurrentWithIntervalOperatorPhrase(ctx); } @Override public Object visitIncludesIntervalOperatorPhrase(cqlParser.IncludesIntervalOperatorPhraseContext ctx) { return super.visitIncludesIntervalOperatorPhrase(ctx); } @Override public Object visitIncludedInIntervalOperatorPhrase(cqlParser.IncludedInIntervalOperatorPhraseContext ctx) { return super.visitIncludedInIntervalOperatorPhrase(ctx); } @Override public Object visitBeforeOrAfterIntervalOperatorPhrase(cqlParser.BeforeOrAfterIntervalOperatorPhraseContext ctx) { return super.visitBeforeOrAfterIntervalOperatorPhrase(ctx); } @Override public Object visitWithinIntervalOperatorPhrase(cqlParser.WithinIntervalOperatorPhraseContext ctx) { return super.visitWithinIntervalOperatorPhrase(ctx); } @Override public Object visitMeetsIntervalOperatorPhrase(cqlParser.MeetsIntervalOperatorPhraseContext ctx) { return super.visitMeetsIntervalOperatorPhrase(ctx); } @Override public Object visitOverlapsIntervalOperatorPhrase(cqlParser.OverlapsIntervalOperatorPhraseContext ctx) { return super.visitOverlapsIntervalOperatorPhrase(ctx); } @Override public Object
visitStartsIntervalOperatorPhrase(cqlParser.StartsIntervalOperatorPhraseContext ctx) { return super.visitStartsIntervalOperatorPhrase(ctx); } @Override public Object visitEndsIntervalOperatorPhrase(cqlParser.EndsIntervalOperatorPhraseContext ctx) { return super.visitEndsIntervalOperatorPhrase(ctx); } @Override public Object visitInvocationTerm(cqlParser.InvocationTermContext ctx) { return super.visitInvocationTerm(ctx); } @Override public Object visitLiteralTerm(cqlParser.LiteralTermContext ctx) { return super.visitLiteralTerm(ctx); } @Override public Object visitExternalConstantTerm(cqlParser.ExternalConstantTermContext ctx) { return super.visitExternalConstantTerm(ctx); } @Override public Object visitIntervalSelectorTerm(cqlParser.IntervalSelectorTermContext ctx) { return super.visitIntervalSelectorTerm(ctx); } @Override public Object visitTupleSelectorTerm(cqlParser.TupleSelectorTermContext ctx) { return super.visitTupleSelectorTerm(ctx); } @Override public Object visitInstanceSelectorTerm(cqlParser.InstanceSelectorTermContext ctx) { return super.visitInstanceSelectorTerm(ctx); } @Override public Object visitListSelectorTerm(cqlParser.ListSelectorTermContext ctx) { return super.visitListSelectorTerm(ctx); } @Override public Object visitCodeSelectorTerm(cqlParser.CodeSelectorTermContext ctx) { return super.visitCodeSelectorTerm(ctx); } @Override public Object visitConceptSelectorTerm(cqlParser.ConceptSelectorTermContext ctx) { return super.visitConceptSelectorTerm(ctx); }
// Parenthesized expressions: the inner expression is emitted as an indented
// "group"; exitGroup() emits a closing newline only when the group ended up
// spanning multiple output lines.
@Override public Object visitParenthesizedTerm(cqlParser.ParenthesizedTermContext ctx) { Object result = defaultResult(); int n = ctx.getChildCount(); for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (c == ctx.expression()) { enterGroup(); } try { Object childResult = c.accept(this); result = aggregateResult(result, childResult); } finally { if (c == ctx.expression()) { exitGroup(); } } } return result; } @Override public Object
visitBooleanLiteral(cqlParser.BooleanLiteralContext ctx) { return super.visitBooleanLiteral(ctx); } @Override public Object visitNullLiteral(cqlParser.NullLiteralContext ctx) { return super.visitNullLiteral(ctx); } @Override public Object visitStringLiteral(cqlParser.StringLiteralContext ctx) { return super.visitStringLiteral(ctx); } @Override public Object visitNumberLiteral(cqlParser.NumberLiteralContext ctx) { return super.visitNumberLiteral(ctx); } @Override public Object visitDateTimeLiteral(cqlParser.DateTimeLiteralContext ctx) { return super.visitDateTimeLiteral(ctx); } @Override public Object visitTimeLiteral(cqlParser.TimeLiteralContext ctx) { return super.visitTimeLiteral(ctx); } @Override public Object visitQuantityLiteral(cqlParser.QuantityLiteralContext ctx) { return super.visitQuantityLiteral(ctx); } @Override public Object visitIntervalSelector(cqlParser.IntervalSelectorContext ctx) { return super.visitIntervalSelector(ctx); }
// Tuple selectors: the first element triggers one extra indent level and
// every element starts on its own line; the matching dedent for the closing
// '}' happens in visitChildren (see the TupleSelectorContext special case).
@Override public Object visitTupleSelector(cqlParser.TupleSelectorContext ctx) { isFirstTupleElement = true; return super.visitTupleSelector(ctx); } @Override public Object visitTupleElementSelector(cqlParser.TupleElementSelectorContext ctx) { if (isFirstTupleElement) { increaseIndentLevel(); isFirstTupleElement = false; } newLine(); return super.visitTupleElementSelector(ctx); } @Override public Object visitInstanceSelector(cqlParser.InstanceSelectorContext ctx) { return super.visitInstanceSelector(ctx); } @Override public Object visitInstanceElementSelector(cqlParser.InstanceElementSelectorContext ctx) { return super.visitInstanceElementSelector(ctx); } @Override public Object visitListSelector(cqlParser.ListSelectorContext ctx) { return super.visitListSelector(ctx); } @Override public Object visitDisplayClause(cqlParser.DisplayClauseContext ctx) { return super.visitDisplayClause(ctx); } @Override public Object visitCodeSelector(cqlParser.CodeSelectorContext ctx) { return super.visitCodeSelector(ctx); } @Override public
Object visitConceptSelector(cqlParser.ConceptSelectorContext ctx) { return super.visitConceptSelector(ctx); } @Override public Object visitIdentifier(cqlParser.IdentifierContext ctx) { return super.visitIdentifier(ctx); } @Override public Object visitExternalConstant(cqlParser.ExternalConstantContext ctx) { return super.visitExternalConstant(ctx); } @Override public Object visitMemberInvocation(cqlParser.MemberInvocationContext ctx) { return super.visitMemberInvocation(ctx); }
// Track function-invocation nesting so needsWhitespaceBefore/After can
// suppress the space around '(' and ')' in calls.
@Override public Object visitFunctionInvocation(cqlParser.FunctionInvocationContext ctx) { enterFunctionInvocation(); try { return super.visitFunctionInvocation(ctx); } finally { exitFunctionInvocation(); } } @Override public Object visitThisInvocation(cqlParser.ThisInvocationContext ctx) { return super.visitThisInvocation(ctx); }
// Function calls: the argument list is emitted as a group so a long
// paramList that wraps gets consistent indentation and a closing newline.
@Override public Object visitFunction(cqlParser.FunctionContext ctx) { Object result = defaultResult(); int n = ctx.getChildCount(); for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (c == ctx.paramList()) { enterGroup(); } try { Object childResult = c.accept(this); result = aggregateResult(result, childResult); } finally { if (c == ctx.paramList()) { exitGroup(); } } } return result; } @Override public Object visitParamList(cqlParser.ParamListContext ctx) { return super.visitParamList(ctx); } @Override public Object visitQuantity(cqlParser.QuantityContext ctx) { return super.visitQuantity(ctx); } @Override public Object visitUnit(cqlParser.UnitContext ctx) { return super.visitUnit(ctx); }
// Every terminal token: first flush any buffered source comments that
// precede it in the token stream, then append the token text itself with
// the appropriate surrounding whitespace.
@Override public Object visitTerminal(TerminalNode node) { checkForComment(node); appendTerminal(node.getText()); return super.visitTerminal(node); }
// Emits (and then removes) every buffered comment whose token index comes
// before the given terminal. Matches form a prefix of the ordered list, so
// removing indices numComments-1 down to 0 discards exactly those emitted.
private void checkForComment(TerminalNode node) { int numComments = 0; for (CommentToken token : comments) { if (token.token.getTokenIndex() < node.getSymbol().getTokenIndex()) { appendComment(token); ++numComments; } } while (numComments > 0) {
comments.remove(--numComments); } }
// A comment token captured from the lexer together with the whitespace text
// that originally preceded it (used to re-attach the comment faithfully).
private static class CommentToken { private Token token; private String whitespaceBefore; public CommentToken(Token token, String whitespaceBefore) { this.token = token; this.whitespaceBefore = whitespaceBefore; } }
// Collects parser syntax errors (ignoring errors on whitespace-only
// symbols) so formatting can be aborted and the original text returned.
private static class SyntaxErrorListener extends BaseErrorListener { private List<Exception> errors = new ArrayList<>(); @Override public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) { if (!((Token) offendingSymbol).getText().trim().isEmpty()) { errors.add(new Exception(String.format("[%d:%d]: %s", line, charPositionInLine, msg))); } } }
// Result of a formatting run: the syntax errors encountered (empty on
// success) and the output text (the unmodified input when errors occurred).
public static class FormatResult { List<Exception> errors; String output; public FormatResult(List<Exception> errors, String output) { this.errors = errors; this.output = output; } } }
// NOTE(review): dataset artifact — the path below separates two concatenated
// revisions of the same file and is not valid Java; neutralized as a comment.
// Src/java/tools/cql-formatter/src/main/java/org/cqframework/cql/tools/formatter/CqlFormatterVisitor.java
package org.cqframework.cql.tools.formatter; import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.tree.*; import org.cqframework.cql.gen.cqlBaseVisitor; import org.cqframework.cql.gen.cqlLexer; import org.cqframework.cql.gen.cqlParser; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.List; import java.util.Stack; import java.util.stream.Collectors;
/**
 * Visitor that re-emits a parsed CQL library with canonical whitespace,
 * indentation, section separation and clause layout, re-attaching the
 * source's comments as it walks the parse tree.
 * Created by Bryn on 7/5/2017.
 */
public class CqlFormatterVisitor extends cqlBaseVisitor {
// NOTE(review): static mutable state — concurrent formatting runs would
// share this comment buffer; presumably single-threaded use is assumed.
private static List<CommentToken> comments = new ArrayList<>();
// Parses the input CQL. On syntax errors the original text is returned
// unchanged together with the errors; otherwise the formatted output is
// returned, with any comments trailing the last terminal appended at the end.
// NOTE(review): getErrorListeners().get(1) assumes the default console
// listener occupies slot 0 — TODO confirm against the ANTLR runtime version.
public static FormatResult getFormattedOutput(InputStream is) throws IOException { ANTLRInputStream in = new ANTLRInputStream(is); cqlLexer lexer = new cqlLexer(in); CommonTokenStream tokens = new CommonTokenStream(lexer); tokens.fill(); populateComments(tokens); cqlParser parser = new cqlParser(tokens); parser.addErrorListener(new SyntaxErrorListener()); parser.setBuildParseTree(true); ParserRuleContext tree = parser.library(); if (((SyntaxErrorListener) parser.getErrorListeners().get(1)).errors.size() > 0) { return new FormatResult(((SyntaxErrorListener) parser.getErrorListeners().get(1)).errors, in.toString()); } CqlFormatterVisitor formatter = new CqlFormatterVisitor(); String output = (String)formatter.visit(tree); if (comments.size() > 0) { StringBuilder eofComments = new StringBuilder(); for (CommentToken comment : comments) { eofComments.append(comment.whitespaceBefore).append(comment.token.getText()); } comments.clear(); output += eofComments.toString(); } return new FormatResult(new ArrayList<>(), output); }
// Reads the entire stream as a string, joining lines with '\n'.
public static String getInputStreamAsString(InputStream is) { return new BufferedReader(new InputStreamReader(is)).lines().collect(Collectors.joining("\n")); }
// Buffers every comment token (line or block) along with the text of the
// token immediately before it (its original leading whitespace) so the
// formatter can re-attach comments at the right place later.
public static void populateComments(CommonTokenStream tokens) { for (Token token : tokens.getTokens()) { if (token.getText().startsWith("//") ||
token.getText().startsWith("/*")) { String whitespace = token.getTokenIndex() < 1 ? "" : tokens.get(token.getTokenIndex() - 1).getText(); comments.add(new CommentToken(token, whitespace)); } } }
// --- Formatting state: output buffer, whitespace bookkeeping, current
// indentation, and the current logical section of the library. ---
private StringBuilder output; private final char space = '\u0020'; private final char tab = '\t'; private final String newLine = "\r\n"; private int currentLine = 0; private boolean onNewLine; private boolean needsWhitespace; private int indentLevel = 0; private boolean isFirstTupleElement = false; private String currentSection; private int sectionCount = 0;
// Sections group constructs of the same kind (using/include/parameter/...).
// A blank line separates sections, and constructs in the "statement"
// section additionally get a separator between each other.
private void newSection(String section) { if (hasSectionContent()) { resetIndentLevel(); newLine(); } sectionCount = 0; currentSection = section; } private boolean needsSectionSeparator(String section) { switch (section) { case "statement": return true; default: return false; } } private void ensureSectionSeparator() { if (needsSectionSeparator(currentSection) && hasSectionContent()) { resetIndentLevel(); newLine(); } } private void addToSection(String section) { if (!section.equals(currentSection)) { newSection(section); } ensureSectionSeparator(); sectionCount++; } private boolean hasSectionContent() { return sectionCount > 0; }
// Nesting counters: the whitespace rules for '<', '>', '(', ')', '[' and
// ']' depend on whether we are currently inside a type specifier, a
// function definition/invocation, or a retrieve.
private int typeSpecifierLevel = 0; private void enterTypeSpecifier() { typeSpecifierLevel++; } private void exitTypeSpecifier() { typeSpecifierLevel--; } private boolean inTypeSpecifier() { return typeSpecifierLevel > 0; } private int functionDefinitionLevel = 0; private void enterFunctionDefinition() { functionDefinitionLevel++; } private void exitFunctionDefinition() { functionDefinitionLevel--; } private boolean inFunctionDefinition() { return functionDefinitionLevel > 0; } private int functionInvocationLevel = 0; private void enterFunctionInvocation() { functionInvocationLevel++; } private void exitFunctionInvocation() { functionInvocationLevel--; } private boolean inFunctionInvocation() { return functionInvocationLevel > 0; } private int retrieveLevel = 0; private void enterRetrieve() {
retrieveLevel++; } private void exitRetrieve() { retrieveLevel--; } private boolean inRetrieve() { return retrieveLevel > 0; }
// A clause (where/return/sort/such-that/...) is indented one level and
// starts on a new line; exitClause only restores the indent level.
private void enterClause() { increaseIndentLevel(); newLine(); } private void exitClause() { decreaseIndentLevel(); }
// Groups (parenthesized expressions, argument lists) remember the output
// line they started on; a closing newline is emitted only when the group
// actually spanned multiple lines.
private Stack<Integer> groups; private void enterGroup() { increaseIndentLevel(); groups.push(currentLine); } private void exitGroup() { Integer groupStartLine = groups.pop(); decreaseIndentLevel(); if (currentLine != groupStartLine) { newLine(); } }
// Token-level whitespace rules; the context counters above switch how the
// angle brackets, parens and square brackets are treated.
private boolean needsWhitespaceBefore(String terminal) { if (terminal.trim().isEmpty() || terminal.startsWith("//") || terminal.startsWith("/*")) { return false; } switch (terminal) { case ":": return false; case ".": return false; case ",": return false; case "<": return !inTypeSpecifier(); case ">": return !inTypeSpecifier(); case "(": return !inFunctionDefinition() && !inFunctionInvocation(); case ")": return !inFunctionDefinition() && !inFunctionInvocation(); case "[": return inRetrieve(); case "]": return false; default: return true; } } private boolean needsWhitespaceAfter(String terminal) { switch (terminal) { case ".": return false; case "<": return !inTypeSpecifier(); case ">": return !inTypeSpecifier(); case "(": return !inFunctionDefinition() && !inFunctionInvocation(); case ")": return !inFunctionDefinition() && !inFunctionInvocation(); case "[": return false; case "]": return inRetrieve(); default: return true; } }
// Re-attach a buffered comment: if the whitespace currently at the end of
// the output differs from the comment's original leading whitespace, the
// output's trailing whitespace is swapped for the original before the
// comment text is appended (and the formatter's whitespace is re-appended).
private void appendComment(CommentToken token) { // get the whitespace at the end of output
String out = output.toString(); String whitespace = out.substring(out.replaceAll("\\s+$", "").length()); if (!whitespace.equals(token.whitespaceBefore)) { String whitespaceBefore = token.whitespaceBefore; output = new StringBuilder() .append(out.substring(0, out.length() - whitespace.length())) .append(whitespaceBefore); } output.append(token.token.getText()).append(whitespace); } private void appendTerminal(String terminal) { if (needsWhitespaceBefore(terminal)) {
ensureWhitespace(); } output.append(terminal); onNewLine = false; needsWhitespace = needsWhitespaceAfter(terminal); } private void increaseIndentLevel() { indentLevel++; } private void decreaseIndentLevel() { indentLevel--; } private void resetIndentLevel() { indentLevel = 0; } private void newLine() { output.append(newLine); currentLine++; for (int i = 0; i < indentLevel; i++) { output.append(tab); } onNewLine = true; } private void newConstruct(String section) { resetIndentLevel(); newLine(); addToSection(section); } private void ensureWhitespace() { if (!onNewLine && needsWhitespace) { output.append(space); } } private void reset() { resetIndentLevel(); currentLine = 1; onNewLine = true; output = new StringBuilder(); groups = new Stack<>(); }
// Entry point of a formatting run: resets all per-run state, walks the
// library, and returns the accumulated output text.
@Override public Object visitLibrary(cqlParser.LibraryContext ctx) { reset(); super.visitLibrary(ctx); resetIndentLevel(); newLine(); return output.toString(); }
// Default child visitor; special-cases the closing '}' of tuple selectors
// and tuple type specifiers so it is dedented onto its own line.
@Override public Object visitChildren(RuleNode node) { Object result = defaultResult(); int n = node.getChildCount(); for (int i=0; i<n; i++) { if (!shouldVisitNextChild(node, result)) { break; } ParseTree c = node.getChild(i); if ((node instanceof cqlParser.TupleSelectorContext || node instanceof cqlParser.TupleTypeSpecifierContext) && c instanceof TerminalNodeImpl) { if (((TerminalNodeImpl) c).getSymbol().getText().equals("}")) { decreaseIndentLevel(); newLine(); } } Object childResult = c.accept(this); result = aggregateResult(result, childResult); } return result; } @Override public Object visitLibraryDefinition(cqlParser.LibraryDefinitionContext ctx) { addToSection("library"); return super.visitLibraryDefinition(ctx); } @Override public Object visitUsingDefinition(cqlParser.UsingDefinitionContext ctx) { newConstruct("using"); return super.visitUsingDefinition(ctx); } @Override public Object visitIncludeDefinition(cqlParser.IncludeDefinitionContext ctx) { newConstruct("include"); return super.visitIncludeDefinition(ctx); } @Override public Object
visitLocalIdentifier(cqlParser.LocalIdentifierContext ctx) { return super.visitLocalIdentifier(ctx); } @Override public Object visitAccessModifier(cqlParser.AccessModifierContext ctx) { return super.visitAccessModifier(ctx); }
// Each top-level construct below starts on a fresh line at indent 0 and
// registers itself with its section (driving blank-line separation).
@Override public Object visitParameterDefinition(cqlParser.ParameterDefinitionContext ctx) { newConstruct("parameter"); return super.visitParameterDefinition(ctx); } @Override public Object visitCodesystemDefinition(cqlParser.CodesystemDefinitionContext ctx) { newConstruct("codesystem"); return super.visitCodesystemDefinition(ctx); } @Override public Object visitValuesetDefinition(cqlParser.ValuesetDefinitionContext ctx) { newConstruct("valueset"); return super.visitValuesetDefinition(ctx); } @Override public Object visitCodesystems(cqlParser.CodesystemsContext ctx) { return super.visitCodesystems(ctx); } @Override public Object visitCodesystemIdentifier(cqlParser.CodesystemIdentifierContext ctx) { return super.visitCodesystemIdentifier(ctx); } @Override public Object visitLibraryIdentifier(cqlParser.LibraryIdentifierContext ctx) { return super.visitLibraryIdentifier(ctx); } @Override public Object visitCodeDefinition(cqlParser.CodeDefinitionContext ctx) { newConstruct("code"); return super.visitCodeDefinition(ctx); } @Override public Object visitConceptDefinition(cqlParser.ConceptDefinitionContext ctx) { newConstruct("concept"); return super.visitConceptDefinition(ctx); } @Override public Object visitCodeIdentifier(cqlParser.CodeIdentifierContext ctx) { return super.visitCodeIdentifier(ctx); } @Override public Object visitCodesystemId(cqlParser.CodesystemIdContext ctx) { return super.visitCodesystemId(ctx); } @Override public Object visitValuesetId(cqlParser.ValuesetIdContext ctx) { return super.visitValuesetId(ctx); } @Override public Object visitVersionSpecifier(cqlParser.VersionSpecifierContext ctx) { return super.visitVersionSpecifier(ctx); } @Override public Object visitCodeId(cqlParser.CodeIdContext ctx) { return
super.visitCodeId(ctx); }
// Type specifiers suppress the spaces inside '<...>' via the nesting counter.
@Override public Object visitTypeSpecifier(cqlParser.TypeSpecifierContext ctx) { enterTypeSpecifier(); try { return super.visitTypeSpecifier(ctx); } finally { exitTypeSpecifier(); } } @Override public Object visitNamedTypeSpecifier(cqlParser.NamedTypeSpecifierContext ctx) { return super.visitNamedTypeSpecifier(ctx); } @Override public Object visitModelIdentifier(cqlParser.ModelIdentifierContext ctx) { return super.visitModelIdentifier(ctx); } @Override public Object visitListTypeSpecifier(cqlParser.ListTypeSpecifierContext ctx) { return super.visitListTypeSpecifier(ctx); } @Override public Object visitIntervalTypeSpecifier(cqlParser.IntervalTypeSpecifierContext ctx) { return super.visitIntervalTypeSpecifier(ctx); }
// Tuple type specifiers: first element indents, each element gets its own
// line; the closing '}' is dedented in visitChildren.
@Override public Object visitTupleTypeSpecifier(cqlParser.TupleTypeSpecifierContext ctx) { isFirstTupleElement = true; return super.visitTupleTypeSpecifier(ctx); } @Override public Object visitTupleElementDefinition(cqlParser.TupleElementDefinitionContext ctx) { if (isFirstTupleElement) { increaseIndentLevel(); isFirstTupleElement = false; } newLine(); return super.visitTupleElementDefinition(ctx); } @Override public Object visitChoiceTypeSpecifier(cqlParser.ChoiceTypeSpecifierContext ctx) { return super.visitChoiceTypeSpecifier(ctx); } @Override public Object visitStatement(cqlParser.StatementContext ctx) { return super.visitStatement(ctx); }
// 'define X:' — the defining expression is emitted as an indented clause.
@Override public Object visitExpressionDefinition(cqlParser.ExpressionDefinitionContext ctx) { newConstruct("statement"); Object result = defaultResult(); int n = ctx.getChildCount(); for (int i=0; i<n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (c == ctx.expression()) { enterClause(); } try { Object childResult = c.accept(this); result = aggregateResult(result, childResult); } finally { if (c == ctx.expression()) { exitClause(); } } } return result; } @Override public Object visitContextDefinition(cqlParser.ContextDefinitionContext ctx) {
newConstruct("statement"); return super.visitContextDefinition(ctx); }
// Function definitions: spaces inside the parameter list parens are
// suppressed, and the body following ':' is indented as a clause.
@Override public Object visitFunctionDefinition(cqlParser.FunctionDefinitionContext ctx) { newConstruct("statement"); Object result = defaultResult(); int n = ctx.getChildCount(); boolean clauseEntered = false; try { for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (c.getText().equals("(")) { enterFunctionDefinition(); } Object childResult = c.accept(this); result = aggregateResult(result, childResult); if (c.getText().equals(")")) { exitFunctionDefinition(); } if (c.getText().equals(":")) { enterClause(); clauseEntered = true; } } } finally { if (clauseEntered) { exitClause(); } } return result; } @Override public Object visitOperandDefinition(cqlParser.OperandDefinitionContext ctx) { return super.visitOperandDefinition(ctx); } @Override public Object visitFunctionBody(cqlParser.FunctionBodyContext ctx) { return super.visitFunctionBody(ctx); } @Override public Object visitQuerySource(cqlParser.QuerySourceContext ctx) { return super.visitQuerySource(ctx); } @Override public Object visitAliasedQuerySource(cqlParser.AliasedQuerySourceContext ctx) { return super.visitAliasedQuerySource(ctx); } @Override public Object visitAlias(cqlParser.AliasContext ctx) { return super.visitAlias(ctx); } @Override public Object visitQueryInclusionClause(cqlParser.QueryInclusionClauseContext ctx) { enterClause(); try { return super.visitQueryInclusionClause(ctx); } finally { exitClause(); } }
// with/without clauses: the condition after 'such that' is indented as a
// clause of its own.
private Object visitWithOrWithoutClause(ParserRuleContext ctx) { Object result = defaultResult(); int n = ctx.getChildCount(); boolean clauseEntered = false; try { for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (c.getText().equals("such that")) { enterClause(); clauseEntered = true; } Object childResult = c.accept(this); result = aggregateResult(result, childResult); } } finally { if
(clauseEntered) { exitClause(); } } return result; } @Override public Object visitWithClause(cqlParser.WithClauseContext ctx) { return visitWithOrWithoutClause(ctx); } @Override public Object visitWithoutClause(cqlParser.WithoutClauseContext ctx) { return visitWithOrWithoutClause(ctx); } @Override public Object visitRetrieve(cqlParser.RetrieveContext ctx) { enterRetrieve(); try { return super.visitRetrieve(ctx); } finally { exitRetrieve(); } } @Override public Object visitCodePath(cqlParser.CodePathContext ctx) { return super.visitCodePath(ctx); } @Override public Object visitTerminology(cqlParser.TerminologyContext ctx) { return super.visitTerminology(ctx); } @Override public Object visitQualifier(cqlParser.QualifierContext ctx) { return super.visitQualifier(ctx); } @Override public Object visitQuery(cqlParser.QueryContext ctx) { return super.visitQuery(ctx); } @Override public Object visitSourceClause(cqlParser.SourceClauseContext ctx) { return super.visitSourceClause(ctx); } @Override public Object visitSingleSourceClause(cqlParser.SingleSourceClauseContext ctx) { return super.visitSingleSourceClause(ctx); }
// 'from a, b, ...' — the sources form an indented clause and each source
// after the first starts on its own line.
@Override public Object visitMultipleSourceClause(cqlParser.MultipleSourceClauseContext ctx) { Object result = defaultResult(); int n = ctx.getChildCount(); boolean clauseEntered = false; try { for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (i == 1) { enterClause(); clauseEntered = true; } if (i > 1 && !c.getText().equals(",")) { newLine(); } Object childResult = c.accept(this); result = aggregateResult(result, childResult); } return result; } finally { if (clauseEntered) { exitClause(); } } }
// let bindings: each binding after the first goes on its own line within
// an indented clause.
@Override public Object visitLetClause(cqlParser.LetClauseContext ctx) { enterClause(); try { Object result = defaultResult(); int n = ctx.getChildCount(); for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (i > 1 &&
!c.getText().equals(",")) { newLine(); } Object childResult = c.accept(this); result = aggregateResult(result, childResult); } return result; } finally { exitClause(); } } @Override public Object visitLetClauseItem(cqlParser.LetClauseItemContext ctx) { return super.visitLetClauseItem(ctx); }
// where / return / sort each form an indented clause.
@Override public Object visitWhereClause(cqlParser.WhereClauseContext ctx) { enterClause(); try { return super.visitWhereClause(ctx); } finally { exitClause(); } } @Override public Object visitReturnClause(cqlParser.ReturnClauseContext ctx) { enterClause(); try { return super.visitReturnClause(ctx); } finally { exitClause(); } } @Override public Object visitSortClause(cqlParser.SortClauseContext ctx) { enterClause(); try { return super.visitSortClause(ctx); } finally { exitClause(); } } @Override public Object visitSortDirection(cqlParser.SortDirectionContext ctx) { return super.visitSortDirection(ctx); } @Override public Object visitSortByItem(cqlParser.SortByItemContext ctx) { return super.visitSortByItem(ctx); } @Override public Object visitQualifiedIdentifier(cqlParser.QualifiedIdentifierContext ctx) { return super.visitQualifiedIdentifier(ctx); } @Override public Object visitDurationBetweenExpression(cqlParser.DurationBetweenExpressionContext ctx) { return super.visitDurationBetweenExpression(ctx); } @Override public Object visitInFixSetExpression(cqlParser.InFixSetExpressionContext ctx) { return visitBinaryClausedExpression(ctx); } @Override public Object visitRetrieveExpression(cqlParser.RetrieveExpressionContext ctx) { return super.visitRetrieveExpression(ctx); } @Override public Object visitTimingExpression(cqlParser.TimingExpressionContext ctx) { return super.visitTimingExpression(ctx); } @Override public Object visitNotExpression(cqlParser.NotExpressionContext ctx) { return super.visitNotExpression(ctx); } @Override public Object visitQueryExpression(cqlParser.QueryExpressionContext ctx) { return super.visitQueryExpression(ctx); } @Override public Object
visitBooleanExpression(cqlParser.BooleanExpressionContext ctx) { return super.visitBooleanExpression(ctx); } @Override public Object visitOrExpression(cqlParser.OrExpressionContext ctx) { return visitBinaryClausedExpression(ctx); } @Override public Object visitCastExpression(cqlParser.CastExpressionContext ctx) { return super.visitCastExpression(ctx); }
// Binary operators with clause layout (and/or/union/...): everything from
// the operator onward is emitted on an indented continuation line.
private Object visitBinaryClausedExpression(ParserRuleContext ctx) { Object result = defaultResult(); int n = ctx.getChildCount(); boolean clauseEntered = false; try { for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (i == 1) { enterClause(); clauseEntered = true; } Object childResult = c.accept(this); result = aggregateResult(result, childResult); } return result; } finally { if (clauseEntered) { exitClause(); } } } @Override public Object visitAndExpression(cqlParser.AndExpressionContext ctx) { return visitBinaryClausedExpression(ctx); } @Override public Object visitBetweenExpression(cqlParser.BetweenExpressionContext ctx) { return super.visitBetweenExpression(ctx); } @Override public Object visitMembershipExpression(cqlParser.MembershipExpressionContext ctx) { return super.visitMembershipExpression(ctx); } @Override public Object visitDifferenceBetweenExpression(cqlParser.DifferenceBetweenExpressionContext ctx) { return super.visitDifferenceBetweenExpression(ctx); } @Override public Object visitInequalityExpression(cqlParser.InequalityExpressionContext ctx) { return super.visitInequalityExpression(ctx); } @Override public Object visitEqualityExpression(cqlParser.EqualityExpressionContext ctx) { return super.visitEqualityExpression(ctx); } @Override public Object visitExistenceExpression(cqlParser.ExistenceExpressionContext ctx) { return super.visitExistenceExpression(ctx); } @Override public Object visitImpliesExpression(cqlParser.ImpliesExpressionContext ctx) { return super.visitImpliesExpression(ctx); } @Override public Object
// Remaining overrides in this region are generated pass-throughs that
// delegate straight to the base visitor.
visitTermExpression(cqlParser.TermExpressionContext ctx) { return super.visitTermExpression(ctx); } @Override public Object visitTypeExpression(cqlParser.TypeExpressionContext ctx) { return super.visitTypeExpression(ctx); } @Override public Object visitDateTimePrecision(cqlParser.DateTimePrecisionContext ctx) { return super.visitDateTimePrecision(ctx); } @Override public Object visitDateTimeComponent(cqlParser.DateTimeComponentContext ctx) { return super.visitDateTimeComponent(ctx); } @Override public Object visitPluralDateTimePrecision(cqlParser.PluralDateTimePrecisionContext ctx) { return super.visitPluralDateTimePrecision(ctx); } @Override public Object visitAdditionExpressionTerm(cqlParser.AdditionExpressionTermContext ctx) { return super.visitAdditionExpressionTerm(ctx); } @Override public Object visitIndexedExpressionTerm(cqlParser.IndexedExpressionTermContext ctx) { return super.visitIndexedExpressionTerm(ctx); } @Override public Object visitWidthExpressionTerm(cqlParser.WidthExpressionTermContext ctx) { return super.visitWidthExpressionTerm(ctx); } @Override public Object visitTimeUnitExpressionTerm(cqlParser.TimeUnitExpressionTermContext ctx) { return super.visitTimeUnitExpressionTerm(ctx); } @Override public Object visitIfThenElseExpressionTerm(cqlParser.IfThenElseExpressionTermContext ctx) { return super.visitIfThenElseExpressionTerm(ctx); } @Override public Object visitTimeBoundaryExpressionTerm(cqlParser.TimeBoundaryExpressionTermContext ctx) { return super.visitTimeBoundaryExpressionTerm(ctx); } @Override public Object visitElementExtractorExpressionTerm(cqlParser.ElementExtractorExpressionTermContext ctx) { return super.visitElementExtractorExpressionTerm(ctx); } @Override public Object visitConversionExpressionTerm(cqlParser.ConversionExpressionTermContext ctx) { return super.visitConversionExpressionTerm(ctx); } @Override public Object visitTypeExtentExpressionTerm(cqlParser.TypeExtentExpressionTermContext ctx) { return
super.visitTypeExtentExpressionTerm(ctx); } @Override public Object visitPredecessorExpressionTerm(cqlParser.PredecessorExpressionTermContext ctx) { return super.visitPredecessorExpressionTerm(ctx); } @Override public Object visitPointExtractorExpressionTerm(cqlParser.PointExtractorExpressionTermContext ctx) { return super.visitPointExtractorExpressionTerm(ctx); } @Override public Object visitMultiplicationExpressionTerm(cqlParser.MultiplicationExpressionTermContext ctx) { return super.visitMultiplicationExpressionTerm(ctx); } @Override public Object visitAggregateExpressionTerm(cqlParser.AggregateExpressionTermContext ctx) { return super.visitAggregateExpressionTerm(ctx); } @Override public Object visitDurationExpressionTerm(cqlParser.DurationExpressionTermContext ctx) { return super.visitDurationExpressionTerm(ctx); } @Override public Object visitCaseExpressionTerm(cqlParser.CaseExpressionTermContext ctx) { return super.visitCaseExpressionTerm(ctx); } @Override public Object visitPowerExpressionTerm(cqlParser.PowerExpressionTermContext ctx) { return super.visitPowerExpressionTerm(ctx); } @Override public Object visitSuccessorExpressionTerm(cqlParser.SuccessorExpressionTermContext ctx) { return super.visitSuccessorExpressionTerm(ctx); } @Override public Object visitPolarityExpressionTerm(cqlParser.PolarityExpressionTermContext ctx) { return super.visitPolarityExpressionTerm(ctx); } @Override public Object visitTermExpressionTerm(cqlParser.TermExpressionTermContext ctx) { return super.visitTermExpressionTerm(ctx); } @Override public Object visitInvocationExpressionTerm(cqlParser.InvocationExpressionTermContext ctx) { return super.visitInvocationExpressionTerm(ctx); } @Override public Object visitCaseExpressionItem(cqlParser.CaseExpressionItemContext ctx) { return super.visitCaseExpressionItem(ctx); } @Override public Object visitDateTimePrecisionSpecifier(cqlParser.DateTimePrecisionSpecifierContext ctx) { return super.visitDateTimePrecisionSpecifier(ctx); }
@Override public Object visitRelativeQualifier(cqlParser.RelativeQualifierContext ctx) { return super.visitRelativeQualifier(ctx); } @Override public Object visitOffsetRelativeQualifier(cqlParser.OffsetRelativeQualifierContext ctx) { return super.visitOffsetRelativeQualifier(ctx); } @Override public Object visitExclusiveRelativeQualifier(cqlParser.ExclusiveRelativeQualifierContext ctx) { return super.visitExclusiveRelativeQualifier(ctx); } @Override public Object visitQuantityOffset(cqlParser.QuantityOffsetContext ctx) { return super.visitQuantityOffset(ctx); } @Override public Object visitTemporalRelationship(cqlParser.TemporalRelationshipContext ctx) { return super.visitTemporalRelationship(ctx); } @Override public Object visitConcurrentWithIntervalOperatorPhrase(cqlParser.ConcurrentWithIntervalOperatorPhraseContext ctx) { return super.visitConcurrentWithIntervalOperatorPhrase(ctx); } @Override public Object visitIncludesIntervalOperatorPhrase(cqlParser.IncludesIntervalOperatorPhraseContext ctx) { return super.visitIncludesIntervalOperatorPhrase(ctx); } @Override public Object visitIncludedInIntervalOperatorPhrase(cqlParser.IncludedInIntervalOperatorPhraseContext ctx) { return super.visitIncludedInIntervalOperatorPhrase(ctx); } @Override public Object visitBeforeOrAfterIntervalOperatorPhrase(cqlParser.BeforeOrAfterIntervalOperatorPhraseContext ctx) { return super.visitBeforeOrAfterIntervalOperatorPhrase(ctx); } @Override public Object visitWithinIntervalOperatorPhrase(cqlParser.WithinIntervalOperatorPhraseContext ctx) { return super.visitWithinIntervalOperatorPhrase(ctx); } @Override public Object visitMeetsIntervalOperatorPhrase(cqlParser.MeetsIntervalOperatorPhraseContext ctx) { return super.visitMeetsIntervalOperatorPhrase(ctx); } @Override public Object visitOverlapsIntervalOperatorPhrase(cqlParser.OverlapsIntervalOperatorPhraseContext ctx) { return super.visitOverlapsIntervalOperatorPhrase(ctx); } @Override public Object 
visitStartsIntervalOperatorPhrase(cqlParser.StartsIntervalOperatorPhraseContext ctx) { return super.visitStartsIntervalOperatorPhrase(ctx); } @Override public Object visitEndsIntervalOperatorPhrase(cqlParser.EndsIntervalOperatorPhraseContext ctx) { return super.visitEndsIntervalOperatorPhrase(ctx); } @Override public Object visitInvocationTerm(cqlParser.InvocationTermContext ctx) { return super.visitInvocationTerm(ctx); } @Override public Object visitLiteralTerm(cqlParser.LiteralTermContext ctx) { return super.visitLiteralTerm(ctx); } @Override public Object visitExternalConstantTerm(cqlParser.ExternalConstantTermContext ctx) { return super.visitExternalConstantTerm(ctx); } @Override public Object visitIntervalSelectorTerm(cqlParser.IntervalSelectorTermContext ctx) { return super.visitIntervalSelectorTerm(ctx); } @Override public Object visitTupleSelectorTerm(cqlParser.TupleSelectorTermContext ctx) { return super.visitTupleSelectorTerm(ctx); } @Override public Object visitInstanceSelectorTerm(cqlParser.InstanceSelectorTermContext ctx) { return super.visitInstanceSelectorTerm(ctx); } @Override public Object visitListSelectorTerm(cqlParser.ListSelectorTermContext ctx) { return super.visitListSelectorTerm(ctx); } @Override public Object visitCodeSelectorTerm(cqlParser.CodeSelectorTermContext ctx) { return super.visitCodeSelectorTerm(ctx); } @Override public Object visitConceptSelectorTerm(cqlParser.ConceptSelectorTermContext ctx) { return super.visitConceptSelectorTerm(ctx); } @Override public Object visitParenthesizedTerm(cqlParser.ParenthesizedTermContext ctx) { Object result = defaultResult(); int n = ctx.getChildCount(); for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (c == ctx.expression()) { enterGroup(); } try { Object childResult = c.accept(this); result = aggregateResult(result, childResult); } finally { if (c == ctx.expression()) { exitGroup(); } } } return result; } @Override public Object 
visitBooleanLiteral(cqlParser.BooleanLiteralContext ctx) { return super.visitBooleanLiteral(ctx); } @Override public Object visitNullLiteral(cqlParser.NullLiteralContext ctx) { return super.visitNullLiteral(ctx); } @Override public Object visitStringLiteral(cqlParser.StringLiteralContext ctx) { return super.visitStringLiteral(ctx); } @Override public Object visitNumberLiteral(cqlParser.NumberLiteralContext ctx) { return super.visitNumberLiteral(ctx); } @Override public Object visitDateTimeLiteral(cqlParser.DateTimeLiteralContext ctx) { return super.visitDateTimeLiteral(ctx); } @Override public Object visitTimeLiteral(cqlParser.TimeLiteralContext ctx) { return super.visitTimeLiteral(ctx); } @Override public Object visitQuantityLiteral(cqlParser.QuantityLiteralContext ctx) { return super.visitQuantityLiteral(ctx); } @Override public Object visitIntervalSelector(cqlParser.IntervalSelectorContext ctx) { return super.visitIntervalSelector(ctx); } @Override public Object visitTupleSelector(cqlParser.TupleSelectorContext ctx) { isFirstTupleElement = true; return super.visitTupleSelector(ctx); } @Override public Object visitTupleElementSelector(cqlParser.TupleElementSelectorContext ctx) { if (isFirstTupleElement) { increaseIndentLevel(); isFirstTupleElement = false; } newLine(); return super.visitTupleElementSelector(ctx); } @Override public Object visitInstanceSelector(cqlParser.InstanceSelectorContext ctx) { return super.visitInstanceSelector(ctx); } @Override public Object visitInstanceElementSelector(cqlParser.InstanceElementSelectorContext ctx) { return super.visitInstanceElementSelector(ctx); } @Override public Object visitListSelector(cqlParser.ListSelectorContext ctx) { return super.visitListSelector(ctx); } @Override public Object visitDisplayClause(cqlParser.DisplayClauseContext ctx) { return super.visitDisplayClause(ctx); } @Override public Object visitCodeSelector(cqlParser.CodeSelectorContext ctx) { return super.visitCodeSelector(ctx); } @Override public 
Object visitConceptSelector(cqlParser.ConceptSelectorContext ctx) { return super.visitConceptSelector(ctx); } @Override public Object visitIdentifier(cqlParser.IdentifierContext ctx) { return super.visitIdentifier(ctx); } @Override public Object visitExternalConstant(cqlParser.ExternalConstantContext ctx) { return super.visitExternalConstant(ctx); } @Override public Object visitMemberInvocation(cqlParser.MemberInvocationContext ctx) { return super.visitMemberInvocation(ctx); } @Override public Object visitFunctionInvocation(cqlParser.FunctionInvocationContext ctx) { enterFunctionInvocation(); try { return super.visitFunctionInvocation(ctx); } finally { exitFunctionInvocation(); } } @Override public Object visitThisInvocation(cqlParser.ThisInvocationContext ctx) { return super.visitThisInvocation(ctx); } @Override public Object visitFunction(cqlParser.FunctionContext ctx) { Object result = defaultResult(); int n = ctx.getChildCount(); for (int i = 0; i < n; i++) { if (!shouldVisitNextChild(ctx, result)) { break; } ParseTree c = ctx.getChild(i); if (c == ctx.paramList()) { enterGroup(); } try { Object childResult = c.accept(this); result = aggregateResult(result, childResult); } finally { if (c == ctx.paramList()) { exitGroup(); } } } return result; } @Override public Object visitParamList(cqlParser.ParamListContext ctx) { return super.visitParamList(ctx); } @Override public Object visitQuantity(cqlParser.QuantityContext ctx) { return super.visitQuantity(ctx); } @Override public Object visitUnit(cqlParser.UnitContext ctx) { return super.visitUnit(ctx); } @Override public Object visitTerminal(TerminalNode node) { checkForComment(node); appendTerminal(node.getText()); return super.visitTerminal(node); } private void checkForComment(TerminalNode node) { int numComments = 0; for (CommentToken token : comments) { if (token.token.getTokenIndex() < node.getSymbol().getTokenIndex()) { appendComment(token); ++numComments; } } while (numComments > 0) { 
comments.remove(--numComments); } } private static class CommentToken { private Token token; private String whitespaceBefore; public CommentToken(Token token, String whitespaceBefore) { this.token = token; this.whitespaceBefore = whitespaceBefore; } } private static class SyntaxErrorListener extends BaseErrorListener { private List<Exception> errors = new ArrayList<>(); @Override public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) { if (!((Token) offendingSymbol).getText().trim().isEmpty()) { errors.add(new Exception(String.format("[%d:%d]: %s", line, charPositionInLine, msg))); } } } static class FormatResult { List<Exception> errors; String output; public FormatResult(List<Exception> errors, String output) { this.errors = errors; this.output = output; } } }
minor tweak to scope just in case
Src/java/tools/cql-formatter/src/main/java/org/cqframework/cql/tools/formatter/CqlFormatterVisitor.java
minor tweak to scope just in case
<ide><path>rc/java/tools/cql-formatter/src/main/java/org/cqframework/cql/tools/formatter/CqlFormatterVisitor.java <ide> } <ide> } <ide> <del> static class FormatResult { <add> public static class FormatResult { <ide> List<Exception> errors; <ide> String output; <ide>
Java
apache-2.0
08023239165f5b4784cb5dde9cfc0fd121b8298e
0
ssilvert/keycloak,stianst/keycloak,abstractj/keycloak,darranl/keycloak,raehalme/keycloak,ahus1/keycloak,hmlnarik/keycloak,ahus1/keycloak,mhajas/keycloak,vmuzikar/keycloak,srose/keycloak,thomasdarimont/keycloak,pedroigor/keycloak,ssilvert/keycloak,abstractj/keycloak,pedroigor/keycloak,pedroigor/keycloak,ssilvert/keycloak,mhajas/keycloak,ahus1/keycloak,raehalme/keycloak,raehalme/keycloak,mhajas/keycloak,hmlnarik/keycloak,hmlnarik/keycloak,stianst/keycloak,hmlnarik/keycloak,mposolda/keycloak,mposolda/keycloak,stianst/keycloak,ssilvert/keycloak,thomasdarimont/keycloak,pedroigor/keycloak,abstractj/keycloak,reneploetz/keycloak,vmuzikar/keycloak,pedroigor/keycloak,darranl/keycloak,vmuzikar/keycloak,hmlnarik/keycloak,keycloak/keycloak,stianst/keycloak,mposolda/keycloak,srose/keycloak,reneploetz/keycloak,keycloak/keycloak,vmuzikar/keycloak,srose/keycloak,jpkrohling/keycloak,darranl/keycloak,mposolda/keycloak,vmuzikar/keycloak,ahus1/keycloak,mhajas/keycloak,srose/keycloak,raehalme/keycloak,raehalme/keycloak,raehalme/keycloak,mhajas/keycloak,thomasdarimont/keycloak,hmlnarik/keycloak,mposolda/keycloak,jpkrohling/keycloak,jpkrohling/keycloak,thomasdarimont/keycloak,keycloak/keycloak,vmuzikar/keycloak,pedroigor/keycloak,reneploetz/keycloak,thomasdarimont/keycloak,abstractj/keycloak,thomasdarimont/keycloak,jpkrohling/keycloak,jpkrohling/keycloak,reneploetz/keycloak,ahus1/keycloak,keycloak/keycloak,ssilvert/keycloak,ahus1/keycloak,darranl/keycloak,srose/keycloak,reneploetz/keycloak,mposolda/keycloak,keycloak/keycloak,stianst/keycloak,abstractj/keycloak
package org.keycloak.testsuite.springboot; import org.hamcrest.Matchers; import org.jboss.arquillian.graphene.page.Page; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.keycloak.OAuth2Constants; import org.keycloak.admin.client.resource.ClientResource; import org.keycloak.admin.client.resource.RealmResource; import org.keycloak.common.util.Base64Url; import org.keycloak.models.Constants; import org.keycloak.protocol.oidc.OIDCLoginProtocol; import org.keycloak.protocol.oidc.OIDCLoginProtocolService; import org.keycloak.representations.AccessTokenResponse; import org.keycloak.representations.idm.*; import org.keycloak.services.resources.LoginActionsService; import org.keycloak.testsuite.ActionURIUtils; import org.keycloak.testsuite.admin.ApiUtil; import org.keycloak.testsuite.arquillian.AuthServerTestEnricher; import org.keycloak.testsuite.broker.BrokerTestTools; import org.keycloak.testsuite.pages.AccountUpdateProfilePage; import org.keycloak.testsuite.pages.ErrorPage; import org.keycloak.testsuite.pages.LoginUpdateProfilePage; import org.keycloak.testsuite.util.OAuthClient; import org.keycloak.testsuite.util.WaitUtils; import org.keycloak.util.JsonSerialization; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.core.UriBuilder; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertThat; import static 
org.keycloak.models.AccountRoles.MANAGE_ACCOUNT; import static org.keycloak.models.AccountRoles.MANAGE_ACCOUNT_LINKS; import static org.keycloak.testsuite.admin.ApiUtil.createUserAndResetPasswordWithAdminClient; import static org.keycloak.testsuite.util.ServerURLs.getAuthServerContextRoot; import static org.keycloak.testsuite.util.URLAssert.assertCurrentUrlStartsWith; import static org.keycloak.testsuite.util.WaitUtils.pause; public class AccountLinkSpringBootTest extends AbstractSpringBootTest { private static final String PARENT_REALM = "parent-realm"; private static final String LINKING_URL = BASE_URL + "/LinkServlet"; private static final String PARENT_USERNAME = "parent-username"; private static final String PARENT_PASSWORD = "parent-password"; private static final String CHILD_USERNAME_1 = "child-username-1"; private static final String CHILD_PASSWORD_1 = "child-password-1"; private static final String CHILD_USERNAME_2 = "child-username-2"; private static final String CHILD_PASSWORD_2 = "child-password-2"; @Page private LinkingPage linkingPage; @Page private AccountUpdateProfilePage profilePage; @Page private LoginUpdateProfilePage loginUpdateProfilePage; @Page private ErrorPage errorPage; @Override public void addTestRealms(List<RealmRepresentation> testRealms) { RealmRepresentation realm = new RealmRepresentation(); realm.setRealm(REALM_NAME); realm.setEnabled(true); realm.setPublicKey(REALM_PUBLIC_KEY); realm.setPrivateKey(REALM_PRIVATE_KEY); realm.setAccessTokenLifespan(600); realm.setAccessCodeLifespan(10); realm.setAccessCodeLifespanUserAction(6000); realm.setSslRequired("external"); ClientRepresentation servlet = new ClientRepresentation(); servlet.setClientId(CLIENT_ID); servlet.setProtocol(OIDCLoginProtocol.LOGIN_PROTOCOL); servlet.setAdminUrl(LINKING_URL); servlet.setDirectAccessGrantsEnabled(true); servlet.setBaseUrl(LINKING_URL); servlet.setRedirectUris(new LinkedList<>()); servlet.getRedirectUris().add(LINKING_URL + "/*"); 
servlet.setSecret(SECRET); servlet.setFullScopeAllowed(true); realm.setClients(new LinkedList<>()); realm.getClients().add(servlet); testRealms.add(realm); realm = new RealmRepresentation(); realm.setRealm(PARENT_REALM); realm.setEnabled(true); testRealms.add(realm); } @Override public void addUsers() { addIdpUser(); addChildUser(); } @Override public void cleanupUsers() { } @Override public void createRoles() { } @Override protected boolean isImportAfterEachMethod() { return true; } public void addIdpUser() { RealmResource realm = adminClient.realms().realm(PARENT_REALM); UserRepresentation user = new UserRepresentation(); user.setUsername(PARENT_USERNAME); user.setEnabled(true); createUserAndResetPasswordWithAdminClient(realm, user, PARENT_PASSWORD); } private String childUserId = null; public void addChildUser() { RealmResource realm = adminClient.realms().realm(REALM_NAME); UserRepresentation user = new UserRepresentation(); user.setUsername(CHILD_USERNAME_1); user.setEnabled(true); childUserId = createUserAndResetPasswordWithAdminClient(realm, user, CHILD_PASSWORD_1); UserRepresentation user2 = new UserRepresentation(); user2.setUsername(CHILD_USERNAME_2); user2.setEnabled(true); String user2Id = createUserAndResetPasswordWithAdminClient(realm, user2, CHILD_PASSWORD_2); // have to add a role as undertow default auth manager doesn't like "*". 
todo we can remove this eventually as undertow fixes this in later versions realm.roles().create(new RoleRepresentation(CORRECT_ROLE, null, false)); RoleRepresentation role = realm.roles().get(CORRECT_ROLE).toRepresentation(); List<RoleRepresentation> roles = new LinkedList<>(); roles.add(role); realm.users().get(childUserId).roles().realmLevel().add(roles); realm.users().get(user2Id).roles().realmLevel().add(roles); ClientRepresentation brokerService = realm.clients().findByClientId(Constants.BROKER_SERVICE_CLIENT_ID).get(0); role = realm.clients().get(brokerService.getId()).roles().get(Constants.READ_TOKEN_ROLE).toRepresentation(); roles.clear(); roles.add(role); realm.users().get(childUserId).roles().clientLevel(brokerService.getId()).add(roles); realm.users().get(user2Id).roles().clientLevel(brokerService.getId()).add(roles); } @Before public void createParentChild() { BrokerTestTools.createKcOidcBroker(adminClient, REALM_NAME, PARENT_REALM); testRealmLoginPage.setAuthRealm(REALM_NAME); } @Test public void testErrorConditions() throws Exception { RealmResource realm = adminClient.realms().realm(REALM_NAME); List<FederatedIdentityRepresentation> links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); ClientRepresentation client = adminClient.realms().realm(REALM_NAME).clients().findByClientId(CLIENT_ID).get(0); UriBuilder redirectUri = UriBuilder.fromUri(LINKING_URL).queryParam("response", "true"); UriBuilder directLinking = UriBuilder.fromUri(getAuthServerContextRoot() + "/auth") .path("realms/{child-realm}/broker/{provider}/link") .queryParam("client_id", CLIENT_ID) .queryParam("redirect_uri", redirectUri.build()) .queryParam("hash", Base64Url.encode("crap".getBytes())) .queryParam("nonce", UUID.randomUUID().toString()); String linkUrl = directLinking .build(REALM_NAME, PARENT_REALM).toString(); // test that child user cannot log into parent realm navigateTo(linkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); 
testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); assertThat(driver.getCurrentUrl(), containsString("link_error=not_logged_in")); logoutAll(); // now log in navigateTo(LINKING_URL + "?response=true"); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); linkingPage.assertIsCurrent(); assertThat(linkingPage.getErrorMessage().toLowerCase(), containsString("account linked")); // now test CSRF with bad hash. navigateTo(linkUrl); assertThat(driver.getPageSource(), containsString("We are sorry...")); logoutAll(); // now log in again with client that does not have scope String accountId = adminClient.realms().realm(REALM_NAME).clients().findByClientId(Constants.ACCOUNT_MANAGEMENT_CLIENT_ID).get(0).getId(); RoleRepresentation manageAccount = adminClient.realms().realm(REALM_NAME).clients().get(accountId).roles().get(MANAGE_ACCOUNT).toRepresentation(); RoleRepresentation manageLinks = adminClient.realms().realm(REALM_NAME).clients().get(accountId).roles().get(MANAGE_ACCOUNT_LINKS).toRepresentation(); RoleRepresentation userRole = adminClient.realms().realm(REALM_NAME).roles().get(CORRECT_ROLE).toRepresentation(); client.setFullScopeAllowed(false); ClientResource clientResource = adminClient.realms().realm(REALM_NAME).clients().get(client.getId()); clientResource.update(client); List<RoleRepresentation> roles = new LinkedList<>(); roles.add(userRole); clientResource.getScopeMappings().realmLevel().add(roles); navigateTo(LINKING_URL + "?response=true"); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); linkingPage.assertIsCurrent(); assertThat(linkingPage.getErrorMessage().toLowerCase(), containsString("account linked")); UriBuilder linkBuilder = UriBuilder.fromUri(LINKING_URL); String clientLinkUrl = linkBuilder.clone() .queryParam("realm", REALM_NAME) .queryParam("provider", PARENT_REALM).build().toString(); 
navigateTo(clientLinkUrl); assertThat(driver.getCurrentUrl(), containsString("error=not_allowed")); logoutAll(); // add MANAGE_ACCOUNT_LINKS scope should pass. links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); roles = new LinkedList<>(); roles.add(manageLinks); clientResource.getScopeMappings().clientLevel(accountId).add(roles); navigateTo(clientLinkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); testRealmLoginPage.setAuthRealm(REALM_NAME); // clean assertThat(driver.getCurrentUrl(), startsWith(linkBuilder.toTemplate())); assertThat(driver.getPageSource(), containsString("Account linked")); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(not(empty()))); realm.users().get(childUserId).removeFederatedIdentity(PARENT_REALM); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); clientResource.getScopeMappings().clientLevel(accountId).remove(roles); logoutAll(); navigateTo(clientLinkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); assertThat(driver.getCurrentUrl(), containsString("link_error=not_allowed")); logoutAll(); // add MANAGE_ACCOUNT scope should pass links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); roles = new LinkedList<>(); roles.add(manageAccount); clientResource.getScopeMappings().clientLevel(accountId).add(roles); navigateTo(clientLinkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); 
testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); testRealmLoginPage.setAuthRealm(REALM_NAME); // clean assertThat(driver.getCurrentUrl(), startsWith(linkBuilder.toTemplate())); assertThat(driver.getPageSource(), containsString("Account linked")); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(not(empty()))); realm.users().get(childUserId).removeFederatedIdentity(PARENT_REALM); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); clientResource.getScopeMappings().clientLevel(accountId).remove(roles); logoutAll(); navigateTo(clientLinkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); assertThat(driver.getCurrentUrl(), containsString("link_error=not_allowed")); logoutAll(); // undo fullScopeAllowed client = adminClient.realms().realm(REALM_NAME).clients().findByClientId(CLIENT_ID).get(0); client.setFullScopeAllowed(true); clientResource.update(client); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); logoutAll(); } @Test public void testAccountLink() throws Exception { RealmResource realm = adminClient.realms().realm(REALM_NAME); List<FederatedIdentityRepresentation> links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); UriBuilder linkBuilder = UriBuilder.fromUri(LINKING_URL); String linkUrl = linkBuilder.clone() .queryParam("realm", REALM_NAME) .queryParam("provider", PARENT_REALM).build().toString(); log.info("linkUrl: " + linkUrl); navigateTo(linkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); assertThat(driver.getPageSource(), containsString(PARENT_REALM)); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); 
testRealmLoginPage.setAuthRealm(REALM_NAME); // clean log.info("After linking: " + driver.getCurrentUrl()); log.info(driver.getPageSource()); assertThat(driver.getCurrentUrl(), startsWith(linkBuilder.toTemplate())); assertThat(driver.getPageSource(), containsString("Account linked")); OAuthClient.AccessTokenResponse response = oauth.doGrantAccessTokenRequest( REALM_NAME, CHILD_USERNAME_1, CHILD_PASSWORD_1, null, CLIENT_ID, SECRET); assertThat(response.getAccessToken(), is(notNullValue())); assertThat(response.getError(), is(nullValue())); Client httpClient = ClientBuilder.newClient(); String firstToken = getToken(response, httpClient); assertThat(firstToken, is(notNullValue())); navigateTo(linkUrl); assertThat(driver.getPageSource(), containsString("Account linked")); String nextToken = getToken(response, httpClient); assertThat(nextToken, is(notNullValue())); assertThat(firstToken, is(not(equalTo(nextToken)))); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(not(empty()))); realm.users().get(childUserId).removeFederatedIdentity(PARENT_REALM); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); logoutAll(); } @Test public void testLinkOnlyProvider() throws Exception { RealmResource realm = adminClient.realms().realm(REALM_NAME); IdentityProviderRepresentation rep = realm.identityProviders().get(PARENT_REALM).toRepresentation(); rep.setLinkOnly(true); realm.identityProviders().get(PARENT_REALM).update(rep); try { List<FederatedIdentityRepresentation> links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); UriBuilder linkBuilder = UriBuilder.fromUri(LINKING_URL); String linkUrl = linkBuilder.clone() .queryParam("realm", REALM_NAME) .queryParam("provider", PARENT_REALM).build().toString(); navigateTo(linkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); // should not be on login page. 
This is what we are testing assertThat(driver.getPageSource(), not(containsString(PARENT_REALM))); // now test that we can still link. testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); testRealmLoginPage.setAuthRealm(REALM_NAME); log.info("After linking: " + driver.getCurrentUrl()); log.info(driver.getPageSource()); assertThat(driver.getCurrentUrl(), startsWith(linkBuilder.toTemplate())); assertThat(driver.getPageSource(), containsString("Account linked")); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(not(empty()))); realm.users().get(childUserId).removeFederatedIdentity(PARENT_REALM); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); logoutAll(); log.info("testing link-only attack"); navigateTo(linkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); log.info("login page uri is: " + driver.getCurrentUrl()); // ok, now scrape the code from page String pageSource = driver.getPageSource(); String action = ActionURIUtils.getActionURIFromPageSource(pageSource); System.out.println("action uri: " + action); Map<String, String> queryParams = ActionURIUtils.parseQueryParamsFromActionURI(action); System.out.println("query params: " + queryParams); // now try and use the code to login to remote link-only idp String uri = "/auth/realms/" + REALM_NAME + "/broker/" + PARENT_REALM + "/login"; uri = UriBuilder.fromUri(getAuthServerContextRoot()) .path(uri) .queryParam(LoginActionsService.SESSION_CODE, queryParams.get(LoginActionsService.SESSION_CODE)) .queryParam(Constants.CLIENT_ID, queryParams.get(Constants.CLIENT_ID)) .queryParam(Constants.TAB_ID, queryParams.get(Constants.TAB_ID)) .build().toString(); log.info("hack uri: " + uri); navigateTo(uri); assertThat(driver.getPageSource(), containsString("Could not send 
authentication request to identity provider.")); } finally { rep.setLinkOnly(false); realm.identityProviders().get(PARENT_REALM).update(rep); } } @Test public void testAccountNotLinkedAutomatically() throws Exception { RealmResource realm = adminClient.realms().realm(REALM_NAME); List<FederatedIdentityRepresentation> links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); // Login to account mgmt first profilePage.open(REALM_NAME); WaitUtils.waitForPageToLoad(); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); profilePage.assertCurrent(); // Now in another tab, open login screen with "prompt=login" . Login screen will be displayed even if I have SSO cookie UriBuilder linkBuilder = UriBuilder.fromUri(LINKING_URL); String linkUrl = linkBuilder.clone() .queryParam(OIDCLoginProtocol.PROMPT_PARAM, OIDCLoginProtocol.PROMPT_VALUE_LOGIN) .build().toString(); navigateTo(linkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); loginPage.clickSocial(PARENT_REALM); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); testRealmLoginPage.setAuthRealm(REALM_NAME); // Test I was not automatically linked. links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); loginUpdateProfilePage.assertCurrent(); loginUpdateProfilePage.update("Joe", "Doe", "[email protected]"); errorPage.assertCurrent(); assertThat(errorPage.getError(), is(equalTo("You are already authenticated as different user '" + CHILD_USERNAME_1 + "' in this session. 
Please log out first."))); logoutAll(); // Remove newly created user String newUserId = ApiUtil.findUserByUsername(realm, PARENT_USERNAME).getId(); getCleanup(REALM_NAME).addUserId(newUserId); } @Test public void testAccountLinkingExpired() throws Exception { RealmResource realm = adminClient.realms().realm(REALM_NAME); List<FederatedIdentityRepresentation> links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); // Login to account mgmt first profilePage.open(REALM_NAME); WaitUtils.waitForPageToLoad(); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); profilePage.assertCurrent(); // Now in another tab, request account linking UriBuilder linkBuilder = UriBuilder.fromUri(LINKING_URL); String linkUrl = linkBuilder.clone() .queryParam("realm", REALM_NAME) .queryParam("provider", PARENT_REALM).build().toString(); navigateTo(linkUrl); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); setTimeOffset(1); // We need to "wait" for 1 second so that notBeforePolicy invalidates token created when logging to child realm // Logout "child" userSession in the meantime (for example through admin request) realm.logoutAll(); // Finish login on parent. 
testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); // Test I was not automatically linked links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); errorPage.assertCurrent(); assertThat(errorPage.getError(), is(equalTo("Requested broker account linking, but current session is no longer valid."))); logoutAll(); navigateTo(linkUrl); // Check we are logged out testRealmLoginPage.setAuthRealm(REALM_NAME); assertCurrentUrlStartsWith(testRealmLoginPage); resetTimeOffset(); } private void navigateTo(String uri) { driver.navigate().to(uri); WaitUtils.waitForPageToLoad(); } public void logoutAll() { String logoutUri = OIDCLoginProtocolService.logoutUrl(authServerPage.createUriBuilder()).build(REALM_NAME).toString(); navigateTo(logoutUri); logoutUri = OIDCLoginProtocolService.logoutUrl(authServerPage.createUriBuilder()).build(PARENT_REALM).toString(); navigateTo(logoutUri); } private String getToken(OAuthClient.AccessTokenResponse response, Client httpClient) throws Exception { log.info("target here is " + OAuthClient.AUTH_SERVER_ROOT); String idpToken = httpClient.target(OAuthClient.AUTH_SERVER_ROOT) .path("realms") .path(REALM_NAME) .path("broker") .path(PARENT_REALM) .path("token") .request() .header("Authorization", "Bearer " + response.getAccessToken()) .get(String.class); AccessTokenResponse res = JsonSerialization.readValue(idpToken, AccessTokenResponse.class); return res.getToken(); } }
testsuite/integration-arquillian/tests/other/springboot-tests/src/test/java/org/keycloak/testsuite/springboot/AccountLinkSpringBootTest.java
package org.keycloak.testsuite.springboot; import org.hamcrest.Matchers; import org.jboss.arquillian.graphene.page.Page; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.keycloak.OAuth2Constants; import org.keycloak.admin.client.resource.ClientResource; import org.keycloak.admin.client.resource.RealmResource; import org.keycloak.common.util.Base64Url; import org.keycloak.models.Constants; import org.keycloak.protocol.oidc.OIDCLoginProtocol; import org.keycloak.protocol.oidc.OIDCLoginProtocolService; import org.keycloak.representations.AccessTokenResponse; import org.keycloak.representations.idm.*; import org.keycloak.services.resources.LoginActionsService; import org.keycloak.testsuite.ActionURIUtils; import org.keycloak.testsuite.admin.ApiUtil; import org.keycloak.testsuite.arquillian.AuthServerTestEnricher; import org.keycloak.testsuite.broker.BrokerTestTools; import org.keycloak.testsuite.pages.AccountUpdateProfilePage; import org.keycloak.testsuite.pages.ErrorPage; import org.keycloak.testsuite.pages.LoginUpdateProfilePage; import org.keycloak.testsuite.util.OAuthClient; import org.keycloak.testsuite.util.WaitUtils; import org.keycloak.util.JsonSerialization; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.core.UriBuilder; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertThat; import static 
org.keycloak.models.AccountRoles.MANAGE_ACCOUNT; import static org.keycloak.models.AccountRoles.MANAGE_ACCOUNT_LINKS; import static org.keycloak.testsuite.admin.ApiUtil.createUserAndResetPasswordWithAdminClient; import static org.keycloak.testsuite.util.URLAssert.assertCurrentUrlStartsWith; import static org.keycloak.testsuite.util.WaitUtils.pause; public class AccountLinkSpringBootTest extends AbstractSpringBootTest { private static final String PARENT_REALM = "parent-realm"; private static final String LINKING_URL = BASE_URL + "/LinkServlet"; private static final String PARENT_USERNAME = "parent-username"; private static final String PARENT_PASSWORD = "parent-password"; private static final String CHILD_USERNAME_1 = "child-username-1"; private static final String CHILD_PASSWORD_1 = "child-password-1"; private static final String CHILD_USERNAME_2 = "child-username-2"; private static final String CHILD_PASSWORD_2 = "child-password-2"; @Page private LinkingPage linkingPage; @Page private AccountUpdateProfilePage profilePage; @Page private LoginUpdateProfilePage loginUpdateProfilePage; @Page private ErrorPage errorPage; @Override public void addTestRealms(List<RealmRepresentation> testRealms) { RealmRepresentation realm = new RealmRepresentation(); realm.setRealm(REALM_NAME); realm.setEnabled(true); realm.setPublicKey(REALM_PUBLIC_KEY); realm.setPrivateKey(REALM_PRIVATE_KEY); realm.setAccessTokenLifespan(600); realm.setAccessCodeLifespan(10); realm.setAccessCodeLifespanUserAction(6000); realm.setSslRequired("external"); ClientRepresentation servlet = new ClientRepresentation(); servlet.setClientId(CLIENT_ID); servlet.setProtocol(OIDCLoginProtocol.LOGIN_PROTOCOL); servlet.setAdminUrl(LINKING_URL); servlet.setDirectAccessGrantsEnabled(true); servlet.setBaseUrl(LINKING_URL); servlet.setRedirectUris(new LinkedList<>()); servlet.getRedirectUris().add(LINKING_URL + "/*"); servlet.setSecret(SECRET); servlet.setFullScopeAllowed(true); realm.setClients(new LinkedList<>()); 
realm.getClients().add(servlet); testRealms.add(realm); realm = new RealmRepresentation(); realm.setRealm(PARENT_REALM); realm.setEnabled(true); testRealms.add(realm); } @Override public void addUsers() { addIdpUser(); addChildUser(); } @Override public void cleanupUsers() { } @Override public void createRoles() { } @Override protected boolean isImportAfterEachMethod() { return true; } public void addIdpUser() { RealmResource realm = adminClient.realms().realm(PARENT_REALM); UserRepresentation user = new UserRepresentation(); user.setUsername(PARENT_USERNAME); user.setEnabled(true); createUserAndResetPasswordWithAdminClient(realm, user, PARENT_PASSWORD); } private String childUserId = null; public void addChildUser() { RealmResource realm = adminClient.realms().realm(REALM_NAME); UserRepresentation user = new UserRepresentation(); user.setUsername(CHILD_USERNAME_1); user.setEnabled(true); childUserId = createUserAndResetPasswordWithAdminClient(realm, user, CHILD_PASSWORD_1); UserRepresentation user2 = new UserRepresentation(); user2.setUsername(CHILD_USERNAME_2); user2.setEnabled(true); String user2Id = createUserAndResetPasswordWithAdminClient(realm, user2, CHILD_PASSWORD_2); // have to add a role as undertow default auth manager doesn't like "*". 
todo we can remove this eventually as undertow fixes this in later versions realm.roles().create(new RoleRepresentation(CORRECT_ROLE, null, false)); RoleRepresentation role = realm.roles().get(CORRECT_ROLE).toRepresentation(); List<RoleRepresentation> roles = new LinkedList<>(); roles.add(role); realm.users().get(childUserId).roles().realmLevel().add(roles); realm.users().get(user2Id).roles().realmLevel().add(roles); ClientRepresentation brokerService = realm.clients().findByClientId(Constants.BROKER_SERVICE_CLIENT_ID).get(0); role = realm.clients().get(brokerService.getId()).roles().get(Constants.READ_TOKEN_ROLE).toRepresentation(); roles.clear(); roles.add(role); realm.users().get(childUserId).roles().clientLevel(brokerService.getId()).add(roles); realm.users().get(user2Id).roles().clientLevel(brokerService.getId()).add(roles); } @Before public void createParentChild() { BrokerTestTools.createKcOidcBroker(adminClient, REALM_NAME, PARENT_REALM); testRealmLoginPage.setAuthRealm(REALM_NAME); } @Test public void testErrorConditions() throws Exception { RealmResource realm = adminClient.realms().realm(REALM_NAME); List<FederatedIdentityRepresentation> links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); ClientRepresentation client = adminClient.realms().realm(REALM_NAME).clients().findByClientId(CLIENT_ID).get(0); UriBuilder redirectUri = UriBuilder.fromUri(LINKING_URL).queryParam("response", "true"); UriBuilder directLinking = UriBuilder.fromUri(getAuthServerContextRoot() + "/auth") .path("realms/{child-realm}/broker/{provider}/link") .queryParam("client_id", CLIENT_ID) .queryParam("redirect_uri", redirectUri.build()) .queryParam("hash", Base64Url.encode("crap".getBytes())) .queryParam("nonce", UUID.randomUUID().toString()); String linkUrl = directLinking .build(REALM_NAME, PARENT_REALM).toString(); // test that child user cannot log into parent realm navigateTo(linkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); 
testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); assertThat(driver.getCurrentUrl(), containsString("link_error=not_logged_in")); logoutAll(); // now log in navigateTo(LINKING_URL + "?response=true"); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); linkingPage.assertIsCurrent(); assertThat(linkingPage.getErrorMessage().toLowerCase(), containsString("account linked")); // now test CSRF with bad hash. navigateTo(linkUrl); assertThat(driver.getPageSource(), containsString("We are sorry...")); logoutAll(); // now log in again with client that does not have scope String accountId = adminClient.realms().realm(REALM_NAME).clients().findByClientId(Constants.ACCOUNT_MANAGEMENT_CLIENT_ID).get(0).getId(); RoleRepresentation manageAccount = adminClient.realms().realm(REALM_NAME).clients().get(accountId).roles().get(MANAGE_ACCOUNT).toRepresentation(); RoleRepresentation manageLinks = adminClient.realms().realm(REALM_NAME).clients().get(accountId).roles().get(MANAGE_ACCOUNT_LINKS).toRepresentation(); RoleRepresentation userRole = adminClient.realms().realm(REALM_NAME).roles().get(CORRECT_ROLE).toRepresentation(); client.setFullScopeAllowed(false); ClientResource clientResource = adminClient.realms().realm(REALM_NAME).clients().get(client.getId()); clientResource.update(client); List<RoleRepresentation> roles = new LinkedList<>(); roles.add(userRole); clientResource.getScopeMappings().realmLevel().add(roles); navigateTo(LINKING_URL + "?response=true"); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); linkingPage.assertIsCurrent(); assertThat(linkingPage.getErrorMessage().toLowerCase(), containsString("account linked")); UriBuilder linkBuilder = UriBuilder.fromUri(LINKING_URL); String clientLinkUrl = linkBuilder.clone() .queryParam("realm", REALM_NAME) .queryParam("provider", PARENT_REALM).build().toString(); 
navigateTo(clientLinkUrl); assertThat(driver.getCurrentUrl(), containsString("error=not_allowed")); logoutAll(); // add MANAGE_ACCOUNT_LINKS scope should pass. links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); roles = new LinkedList<>(); roles.add(manageLinks); clientResource.getScopeMappings().clientLevel(accountId).add(roles); navigateTo(clientLinkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); testRealmLoginPage.setAuthRealm(REALM_NAME); // clean assertThat(driver.getCurrentUrl(), startsWith(linkBuilder.toTemplate())); assertThat(driver.getPageSource(), containsString("Account linked")); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(not(empty()))); realm.users().get(childUserId).removeFederatedIdentity(PARENT_REALM); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); clientResource.getScopeMappings().clientLevel(accountId).remove(roles); logoutAll(); navigateTo(clientLinkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); assertThat(driver.getCurrentUrl(), containsString("link_error=not_allowed")); logoutAll(); // add MANAGE_ACCOUNT scope should pass links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); roles = new LinkedList<>(); roles.add(manageAccount); clientResource.getScopeMappings().clientLevel(accountId).add(roles); navigateTo(clientLinkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); 
testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); testRealmLoginPage.setAuthRealm(REALM_NAME); // clean assertThat(driver.getCurrentUrl(), startsWith(linkBuilder.toTemplate())); assertThat(driver.getPageSource(), containsString("Account linked")); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(not(empty()))); realm.users().get(childUserId).removeFederatedIdentity(PARENT_REALM); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); clientResource.getScopeMappings().clientLevel(accountId).remove(roles); logoutAll(); navigateTo(clientLinkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); assertThat(driver.getCurrentUrl(), containsString("link_error=not_allowed")); logoutAll(); // undo fullScopeAllowed client = adminClient.realms().realm(REALM_NAME).clients().findByClientId(CLIENT_ID).get(0); client.setFullScopeAllowed(true); clientResource.update(client); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); logoutAll(); } @Test public void testAccountLink() throws Exception { RealmResource realm = adminClient.realms().realm(REALM_NAME); List<FederatedIdentityRepresentation> links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); UriBuilder linkBuilder = UriBuilder.fromUri(LINKING_URL); String linkUrl = linkBuilder.clone() .queryParam("realm", REALM_NAME) .queryParam("provider", PARENT_REALM).build().toString(); log.info("linkUrl: " + linkUrl); navigateTo(linkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); assertThat(driver.getPageSource(), containsString(PARENT_REALM)); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); 
testRealmLoginPage.setAuthRealm(REALM_NAME); // clean log.info("After linking: " + driver.getCurrentUrl()); log.info(driver.getPageSource()); assertThat(driver.getCurrentUrl(), startsWith(linkBuilder.toTemplate())); assertThat(driver.getPageSource(), containsString("Account linked")); OAuthClient.AccessTokenResponse response = oauth.doGrantAccessTokenRequest( REALM_NAME, CHILD_USERNAME_1, CHILD_PASSWORD_1, null, CLIENT_ID, SECRET); assertThat(response.getAccessToken(), is(notNullValue())); assertThat(response.getError(), is(nullValue())); Client httpClient = ClientBuilder.newClient(); String firstToken = getToken(response, httpClient); assertThat(firstToken, is(notNullValue())); navigateTo(linkUrl); assertThat(driver.getPageSource(), containsString("Account linked")); String nextToken = getToken(response, httpClient); assertThat(nextToken, is(notNullValue())); assertThat(firstToken, is(not(equalTo(nextToken)))); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(not(empty()))); realm.users().get(childUserId).removeFederatedIdentity(PARENT_REALM); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); logoutAll(); } @Test public void testLinkOnlyProvider() throws Exception { RealmResource realm = adminClient.realms().realm(REALM_NAME); IdentityProviderRepresentation rep = realm.identityProviders().get(PARENT_REALM).toRepresentation(); rep.setLinkOnly(true); realm.identityProviders().get(PARENT_REALM).update(rep); try { List<FederatedIdentityRepresentation> links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); UriBuilder linkBuilder = UriBuilder.fromUri(LINKING_URL); String linkUrl = linkBuilder.clone() .queryParam("realm", REALM_NAME) .queryParam("provider", PARENT_REALM).build().toString(); navigateTo(linkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); // should not be on login page. 
This is what we are testing assertThat(driver.getPageSource(), not(containsString(PARENT_REALM))); // now test that we can still link. testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); testRealmLoginPage.setAuthRealm(REALM_NAME); log.info("After linking: " + driver.getCurrentUrl()); log.info(driver.getPageSource()); assertThat(driver.getCurrentUrl(), startsWith(linkBuilder.toTemplate())); assertThat(driver.getPageSource(), containsString("Account linked")); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(not(empty()))); realm.users().get(childUserId).removeFederatedIdentity(PARENT_REALM); links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); logoutAll(); log.info("testing link-only attack"); navigateTo(linkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); log.info("login page uri is: " + driver.getCurrentUrl()); // ok, now scrape the code from page String pageSource = driver.getPageSource(); String action = ActionURIUtils.getActionURIFromPageSource(pageSource); System.out.println("action uri: " + action); Map<String, String> queryParams = ActionURIUtils.parseQueryParamsFromActionURI(action); System.out.println("query params: " + queryParams); // now try and use the code to login to remote link-only idp String uri = "/auth/realms/" + REALM_NAME + "/broker/" + PARENT_REALM + "/login"; uri = UriBuilder.fromUri(getAuthServerContextRoot()) .path(uri) .queryParam(LoginActionsService.SESSION_CODE, queryParams.get(LoginActionsService.SESSION_CODE)) .queryParam(Constants.CLIENT_ID, queryParams.get(Constants.CLIENT_ID)) .queryParam(Constants.TAB_ID, queryParams.get(Constants.TAB_ID)) .build().toString(); log.info("hack uri: " + uri); navigateTo(uri); assertThat(driver.getPageSource(), containsString("Could not send 
authentication request to identity provider.")); } finally { rep.setLinkOnly(false); realm.identityProviders().get(PARENT_REALM).update(rep); } } @Test public void testAccountNotLinkedAutomatically() throws Exception { RealmResource realm = adminClient.realms().realm(REALM_NAME); List<FederatedIdentityRepresentation> links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); // Login to account mgmt first profilePage.open(REALM_NAME); WaitUtils.waitForPageToLoad(); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); profilePage.assertCurrent(); // Now in another tab, open login screen with "prompt=login" . Login screen will be displayed even if I have SSO cookie UriBuilder linkBuilder = UriBuilder.fromUri(LINKING_URL); String linkUrl = linkBuilder.clone() .queryParam(OIDCLoginProtocol.PROMPT_PARAM, OIDCLoginProtocol.PROMPT_VALUE_LOGIN) .build().toString(); navigateTo(linkUrl); assertCurrentUrlStartsWith(testRealmLoginPage); loginPage.clickSocial(PARENT_REALM); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); testRealmLoginPage.setAuthRealm(REALM_NAME); // Test I was not automatically linked. links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); loginUpdateProfilePage.assertCurrent(); loginUpdateProfilePage.update("Joe", "Doe", "[email protected]"); errorPage.assertCurrent(); assertThat(errorPage.getError(), is(equalTo("You are already authenticated as different user '" + CHILD_USERNAME_1 + "' in this session. 
Please log out first."))); logoutAll(); // Remove newly created user String newUserId = ApiUtil.findUserByUsername(realm, PARENT_USERNAME).getId(); getCleanup(REALM_NAME).addUserId(newUserId); } @Test public void testAccountLinkingExpired() throws Exception { RealmResource realm = adminClient.realms().realm(REALM_NAME); List<FederatedIdentityRepresentation> links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); // Login to account mgmt first profilePage.open(REALM_NAME); WaitUtils.waitForPageToLoad(); assertCurrentUrlStartsWith(testRealmLoginPage); testRealmLoginPage.form().login(CHILD_USERNAME_1, CHILD_PASSWORD_1); profilePage.assertCurrent(); // Now in another tab, request account linking UriBuilder linkBuilder = UriBuilder.fromUri(LINKING_URL); String linkUrl = linkBuilder.clone() .queryParam("realm", REALM_NAME) .queryParam("provider", PARENT_REALM).build().toString(); navigateTo(linkUrl); testRealmLoginPage.setAuthRealm(PARENT_REALM); assertCurrentUrlStartsWith(testRealmLoginPage); setTimeOffset(1); // We need to "wait" for 1 second so that notBeforePolicy invalidates token created when logging to child realm // Logout "child" userSession in the meantime (for example through admin request) realm.logoutAll(); // Finish login on parent. 
testRealmLoginPage.form().login(PARENT_USERNAME, PARENT_PASSWORD); // Test I was not automatically linked links = realm.users().get(childUserId).getFederatedIdentity(); assertThat(links, is(empty())); errorPage.assertCurrent(); assertThat(errorPage.getError(), is(equalTo("Requested broker account linking, but current session is no longer valid."))); logoutAll(); navigateTo(linkUrl); // Check we are logged out testRealmLoginPage.setAuthRealm(REALM_NAME); assertCurrentUrlStartsWith(testRealmLoginPage); resetTimeOffset(); } private void navigateTo(String uri) { driver.navigate().to(uri); WaitUtils.waitForPageToLoad(); } public void logoutAll() { String logoutUri = OIDCLoginProtocolService.logoutUrl(authServerPage.createUriBuilder()).build(REALM_NAME).toString(); navigateTo(logoutUri); logoutUri = OIDCLoginProtocolService.logoutUrl(authServerPage.createUriBuilder()).build(PARENT_REALM).toString(); navigateTo(logoutUri); } private String getToken(OAuthClient.AccessTokenResponse response, Client httpClient) throws Exception { log.info("target here is " + OAuthClient.AUTH_SERVER_ROOT); String idpToken = httpClient.target(OAuthClient.AUTH_SERVER_ROOT) .path("realms") .path(REALM_NAME) .path("broker") .path(PARENT_REALM) .path("token") .request() .header("Authorization", "Bearer " + response.getAccessToken()) .get(String.class); AccessTokenResponse res = JsonSerialization.readValue(idpToken, AccessTokenResponse.class); return res.getToken(); } }
KEYCLOAK-14778 Springboot tests fails with compilation error
testsuite/integration-arquillian/tests/other/springboot-tests/src/test/java/org/keycloak/testsuite/springboot/AccountLinkSpringBootTest.java
KEYCLOAK-14778 Springboot tests fails with compilation error
<ide><path>estsuite/integration-arquillian/tests/other/springboot-tests/src/test/java/org/keycloak/testsuite/springboot/AccountLinkSpringBootTest.java <ide> import static org.keycloak.models.AccountRoles.MANAGE_ACCOUNT; <ide> import static org.keycloak.models.AccountRoles.MANAGE_ACCOUNT_LINKS; <ide> import static org.keycloak.testsuite.admin.ApiUtil.createUserAndResetPasswordWithAdminClient; <add>import static org.keycloak.testsuite.util.ServerURLs.getAuthServerContextRoot; <ide> import static org.keycloak.testsuite.util.URLAssert.assertCurrentUrlStartsWith; <ide> import static org.keycloak.testsuite.util.WaitUtils.pause; <ide>
Java
apache-2.0
941bdac746af8a0fbca5c3c514b4e656b3915aeb
0
ashensw/carbon-ml,nirmal070125/carbon-ml,manoramahp/carbon-ml,danula/carbon-ml,SupunS/carbon-ml,maheshakya/carbon-ml,ashensw/carbon-ml,nirmal070125/carbon-ml,nirmal070125/carbon-ml,wso2/carbon-ml,SupunS/carbon-ml,cdathuraliya/carbon-ml,nectoc/carbon-ml,danula/carbon-ml,maheshakya/carbon-ml,cdathuraliya/carbon-ml,danula/carbon-ml,manoramahp/carbon-ml,wso2/carbon-ml,wso2/carbon-ml,fazlan-nazeem/carbon-ml,cdathuraliya/carbon-ml,fazlan-nazeem/carbon-ml,maheshakya/carbon-ml,nectoc/carbon-ml,nectoc/carbon-ml,SupunS/carbon-ml,danula/carbon-ml,fazlan-nazeem/carbon-ml,ashensw/carbon-ml,manoramahp/carbon-ml
package org.wso2.carbon.ml.ui.helper; import java.rmi.RemoteException; import org.apache.axis2.AxisFault; import org.apache.axis2.client.Options; import org.apache.axis2.client.ServiceClient; import org.apache.axis2.context.ConfigurationContext; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.ml.dataset.stub.DatasetServiceStub; import org.wso2.carbon.ml.dataset.xsd.Feature; public class DatasetServiceClient { private static final Log LOGGER = LogFactory .getLog(DatasetServiceClient.class); private DatasetServiceStub stub; public DatasetServiceClient(ConfigurationContext configCtx, String backendServerURL, String cookie) throws DatasetServiceClientException { try { String serviceURL = backendServerURL + "DatasetService"; stub = new DatasetServiceStub(configCtx, serviceURL); ServiceClient client = stub._getServiceClient(); Options options = client.getOptions(); options.setManageSession(true); options.setProperty( org.apache.axis2.transport.http.HTTPConstants.COOKIE_STRING, cookie); } catch (AxisFault ex) { String msg = "An error has occurred while initilizing the DatasetServiceStub, error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } public int importDataset(String datasetName) throws DatasetServiceClientException { try { return stub.importData(datasetName); } catch (Exception ex) { String msg = "An error has occurred while calling importData() error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } public String getDatasetUploadingDir() throws DatasetServiceClientException { try { return stub.getDatasetUploadingDir(); } catch (Exception ex) { String msg = "An error has occurred while calling getDatasetUploadingDir() error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } public int getDatasetInMemoryThreshold() throws DatasetServiceClientException 
{ try { return stub.getDatasetInMemoryThreshold(); } catch (RemoteException ex) { String msg = "An error has occurred while calling getDatasetInMemoryThreshold() error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } public long getDatasetUploadingLimit() throws DatasetServiceClientException { try { return stub.getDatasetUploadingLimit(); } catch (RemoteException ex) { String msg = "An error has occurred while calling getDatasetUploadingLimit() error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } public Feature[] getFeatures(int start, int numberOfFeatures) throws DatasetServiceClientException { try { return stub.getFeatures(0, 0, 10); } catch (Exception ex) { String msg = "An error has occurred while calling getFeatures() error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } }
ml/ml-components/org.wso2.carbon.ml.ui/src/main/java/org/wso2/carbon/ml/ui/helper/DatasetServiceClient.java
package org.wso2.carbon.ml.ui.helper; import java.rmi.RemoteException; import org.apache.axis2.AxisFault; import org.apache.axis2.client.Options; import org.apache.axis2.client.ServiceClient; import org.apache.axis2.context.ConfigurationContext; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.ml.dataset.stub.DatasetServiceStub; import org.wso2.carbon.ml.dataset.xsd.Feature; public class DatasetServiceClient { private static final Log LOGGER = LogFactory .getLog(DatasetServiceClient.class); private DatasetServiceStub stub; public DatasetServiceClient(ConfigurationContext configCtx, String backendServerURL, String cookie) throws DatasetServiceClientException { try { String serviceURL = backendServerURL + "DatasetService"; stub = new DatasetServiceStub(configCtx, serviceURL); ServiceClient client = stub._getServiceClient(); Options options = client.getOptions(); options.setManageSession(true); options.setProperty( org.apache.axis2.transport.http.HTTPConstants.COOKIE_STRING, cookie); } catch (AxisFault ex) { String msg = "An error has occurred while initilizing the DatasetServiceStub, error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } public int importDataset(String datasetName) throws DatasetServiceClientException { try { return stub.importData(datasetName); } catch (Exception ex) { String msg = "An error has occurred while calling importData() error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } public String getDatasetUploadingDir() throws DatasetServiceClientException { try { return stub.getDatasetUploadingDir(); } catch (RemoteException ex) { String msg = "An error has occurred while calling getDatasetUploadingDir() error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } public int getDatasetInMemoryThreshold() throws 
DatasetServiceClientException { try { return stub.getDatasetInMemoryThreshold(); } catch (RemoteException ex) { String msg = "An error has occurred while calling getDatasetInMemoryThreshold() error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } public long getDatasetUploadingLimit() throws DatasetServiceClientException { try { return stub.getDatasetUploadingLimit(); } catch (RemoteException ex) { String msg = "An error has occurred while calling getDatasetUploadingLimit() error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } public Feature[] getFeatures(int start, int numberOfFeatures) throws DatasetServiceClientException { try { return stub.getFeatures(0, 0, 10); } catch (Exception ex) { String msg = "An error has occurred while calling getFeatures() error message: " + ex.getMessage(); LOGGER.error(msg, ex); throw new DatasetServiceClientException(msg); } } }
changed the exception type
ml/ml-components/org.wso2.carbon.ml.ui/src/main/java/org/wso2/carbon/ml/ui/helper/DatasetServiceClient.java
changed the exception type
<ide><path>l/ml-components/org.wso2.carbon.ml.ui/src/main/java/org/wso2/carbon/ml/ui/helper/DatasetServiceClient.java <ide> throws DatasetServiceClientException { <ide> try { <ide> return stub.getDatasetUploadingDir(); <del> } catch (RemoteException ex) { <add> } catch (Exception ex) { <ide> String msg = "An error has occurred while calling getDatasetUploadingDir() error message: " <ide> + ex.getMessage(); <ide> LOGGER.error(msg, ex);
Java
apache-2.0
b44033817d3f9e74a7a06fc69e5a9ec5fdd2e52b
0
pferraro/undertow,darranl/undertow,undertow-io/undertow,jamezp/undertow,baranowb/undertow,darranl/undertow,darranl/undertow,undertow-io/undertow,jamezp/undertow,baranowb/undertow,rhusar/undertow,jstourac/undertow,jamezp/undertow,soul2zimate/undertow,jstourac/undertow,undertow-io/undertow,baranowb/undertow,soul2zimate/undertow,rhusar/undertow,jstourac/undertow,rhusar/undertow,soul2zimate/undertow,pferraro/undertow,pferraro/undertow
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.server.security; import static javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; import io.undertow.testutils.DefaultServer; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Collections; import java.util.HashMap; import java.util.Map; import javax.security.auth.Subject; import javax.security.auth.callback.Callback; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.callback.NameCallback; import javax.security.auth.callback.PasswordCallback; import javax.security.auth.callback.UnsupportedCallbackException; import javax.security.auth.login.AppConfigurationEntry; import javax.security.auth.login.Configuration; import javax.security.auth.login.LoginContext; import javax.security.auth.login.LoginException; import org.apache.directory.api.ldap.model.entry.DefaultEntry; import org.apache.directory.api.ldap.model.ldif.LdifEntry; import org.apache.directory.api.ldap.model.ldif.LdifReader; import 
org.apache.directory.api.ldap.model.schema.SchemaManager; import org.apache.directory.server.core.api.CoreSession; import org.apache.directory.server.core.api.DirectoryService; import org.apache.directory.server.core.api.partition.Partition; import org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory; import org.apache.directory.server.core.factory.DirectoryServiceFactory; import org.apache.directory.server.core.factory.PartitionFactory; import org.apache.directory.server.core.kerberos.KeyDerivationInterceptor; import org.apache.directory.server.kerberos.KerberosConfig; import org.apache.directory.server.kerberos.kdc.KdcServer; import org.apache.directory.server.ldap.LdapServer; import org.apache.directory.server.protocol.shared.transport.TcpTransport; import org.apache.directory.server.protocol.shared.transport.Transport; import org.apache.directory.server.protocol.shared.transport.UdpTransport; /** * Utility class to start up a test KDC backed by a directory server. * * It is better to start the server once instead of once per test but once running * the overhead is minimal. However a better solution may be to use the {@link Suite} * runner but we currently need to use the {@link DefaultServer} runner. * * TODO - May be able to add some lifecycle methods to DefaultServer to allow * for an extension. 
* * @author <a href="mailto:[email protected]">Darran Lofthouse</a> */ class KerberosKDCUtil { private static final boolean IS_IBM = System.getProperty("java.vendor").contains("IBM"); static final int LDAP_PORT = 11389; static final int KDC_PORT = 6088; private static final String DIRECTORY_NAME = "Test Service"; private static boolean initialised; private static Path workingDir; /* * LDAP Related */ private static DirectoryService directoryService; private static LdapServer ldapServer; /* * KDC Related */ private static KdcServer kdcServer; public static boolean startServer() throws Exception { if (initialised) { return false; } setupEnvironment(); startLdapServer(); startKDC(); initialised = true; return true; } private static void startLdapServer() throws Exception { createWorkingDir(); DirectoryServiceFactory dsf = new DefaultDirectoryServiceFactory(); dsf.init(DIRECTORY_NAME); directoryService = dsf.getDirectoryService(); directoryService.addLast(new KeyDerivationInterceptor()); // Derives the Kerberos keys for new entries. 
directoryService.getChangeLog().setEnabled(false); SchemaManager schemaManager = directoryService.getSchemaManager(); createPartition(dsf, schemaManager, "users", "ou=users,dc=undertow,dc=io"); CoreSession adminSession = directoryService.getAdminSession(); Map<String, String> mappings = Collections.singletonMap("hostname", DefaultServer.getDefaultServerAddress().getHostString()); processLdif(schemaManager, adminSession, "partition.ldif", mappings); processLdif(schemaManager, adminSession, "krbtgt.ldif", mappings); processLdif(schemaManager, adminSession, "user.ldif", mappings); processLdif(schemaManager, adminSession, "server.ldif", mappings); ldapServer = new LdapServer(); ldapServer.setServiceName("DefaultLDAP"); Transport ldap = new TcpTransport( "0.0.0.0", LDAP_PORT, 3, 5 ); ldapServer.addTransports(ldap); ldapServer.setDirectoryService(directoryService); ldapServer.start(); } private static void createPartition(final DirectoryServiceFactory dsf, final SchemaManager schemaManager, final String id, final String suffix) throws Exception { PartitionFactory pf = dsf.getPartitionFactory(); Partition p = pf.createPartition(schemaManager, id, suffix, 1000, workingDir.toFile()); pf.addIndex(p, "krb5PrincipalName", 10); p.initialize(); directoryService.addPartition(p); } private static void processLdif(final SchemaManager schemaManager, final CoreSession adminSession, final String ldifName, final Map<String, String> mappings) throws Exception { InputStream resourceInput = KerberosKDCUtil.class.getResourceAsStream("/ldif/" + ldifName); ByteArrayOutputStream baos = new ByteArrayOutputStream(resourceInput.available()); int current; while ((current = resourceInput.read()) != -1) { if (current == '$') { // Enter String replacement mode. 
int second = resourceInput.read(); if (second == '{') { ByteArrayOutputStream substitute = new ByteArrayOutputStream(); while ((current = resourceInput.read()) != -1 && current != '}') { substitute.write(current); } if (current == -1) { baos.write(current); baos.write(second); baos.write(substitute.toByteArray()); // Terminator never found. } String toReplace = new String(substitute.toByteArray(), StandardCharsets.UTF_8); if (mappings.containsKey(toReplace)) { baos.write(mappings.get(toReplace).getBytes()); } else { throw new IllegalArgumentException(String.format("No mapping found for '%s'", toReplace)); } } else { baos.write(current); baos.write(second); } } else { baos.write(current); } } ByteArrayInputStream ldifInput = new ByteArrayInputStream(baos.toByteArray()); LdifReader ldifReader = new LdifReader(ldifInput); for (LdifEntry ldifEntry : ldifReader) { adminSession.add(new DefaultEntry(schemaManager, ldifEntry.getEntry())); } ldifReader.close(); ldifInput.close(); } private static void startKDC() throws Exception { kdcServer = new KdcServer(); kdcServer.setServiceName("Test KDC"); kdcServer.setSearchBaseDn("ou=users,dc=undertow,dc=io"); KerberosConfig config = kdcServer.getConfig(); config.setServicePrincipal("krbtgt/[email protected]"); config.setPrimaryRealm("UNDERTOW.IO"); config.setPaEncTimestampRequired(false); UdpTransport udp = new UdpTransport("0.0.0.0", KDC_PORT); kdcServer.addTransports(udp); kdcServer.setDirectoryService(directoryService); kdcServer.start(); } private static void setupEnvironment() { final URL configPath = KerberosKDCUtil.class.getResource("/krb5.conf"); try { System.setProperty("java.security.krb5.conf", Paths.get(configPath.toURI()).normalize().toAbsolutePath().toString()); } catch (URISyntaxException e) { throw new RuntimeException(e); } } private static void createWorkingDir() throws IOException { if (workingDir == null) { workingDir = Paths.get(".", "target", "apacheds_working"); if (!Files.exists(workingDir)) { 
Files.createDirectories(workingDir); } } try(DirectoryStream<Path> stream = Files.newDirectoryStream(workingDir)) { for(Path child : stream) { Files.delete(child); } } } static Subject login(final String userName, final char[] password) throws LoginException { Subject theSubject = new Subject(); CallbackHandler cbh = new UsernamePasswordCBH(userName, password); LoginContext lc = new LoginContext("KDC", theSubject, cbh, createJaasConfiguration()); lc.login(); return theSubject; } private static Configuration createJaasConfiguration() { return new Configuration() { @Override public AppConfigurationEntry[] getAppConfigurationEntry(String name) { if (!"KDC".equals(name)) { throw new IllegalArgumentException("Unexpected name '" + name + "'"); } AppConfigurationEntry[] entries = new AppConfigurationEntry[1]; Map<String, Object> options = new HashMap<>(); options.put("debug", "true"); options.put("refreshKrb5Config", "true"); if (IS_IBM) { options.put("noAddress", "true"); options.put("credsType", "both"); entries[0] = new AppConfigurationEntry("com.ibm.security.auth.module.Krb5LoginModule", REQUIRED, options); } else { options.put("storeKey", "true"); options.put("isInitiator", "true"); entries[0] = new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule", REQUIRED, options); } return entries; } }; } private static class UsernamePasswordCBH implements CallbackHandler { /* * Note: We use CallbackHandler implementations like this in test cases as test cases need to run unattended, a true * CallbackHandler implementation should interact directly with the current user to prompt for the username and * password. * * i.e. In a client app NEVER prompt for these values in advance and provide them to a CallbackHandler like this. 
*/ private final String username; private final char[] password; private UsernamePasswordCBH(final String username, final char[] password) { this.username = username; this.password = password; } @Override public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException { for (Callback current : callbacks) { if (current instanceof NameCallback) { NameCallback ncb = (NameCallback) current; ncb.setName(username); } else if (current instanceof PasswordCallback) { PasswordCallback pcb = (PasswordCallback) current; pcb.setPassword(password); } else { throw new UnsupportedCallbackException(current); } } } } }
core/src/test/java/io/undertow/server/security/KerberosKDCUtil.java
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.server.security; import static javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag.REQUIRED; import io.undertow.testutils.DefaultServer; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Collections; import java.util.HashMap; import java.util.Map; import javax.security.auth.Subject; import javax.security.auth.callback.Callback; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.callback.NameCallback; import javax.security.auth.callback.PasswordCallback; import javax.security.auth.callback.UnsupportedCallbackException; import javax.security.auth.login.AppConfigurationEntry; import javax.security.auth.login.Configuration; import javax.security.auth.login.LoginContext; import javax.security.auth.login.LoginException; import org.apache.directory.api.ldap.model.entry.DefaultEntry; import org.apache.directory.api.ldap.model.ldif.LdifEntry; import org.apache.directory.api.ldap.model.ldif.LdifReader; import 
org.apache.directory.api.ldap.model.schema.SchemaManager; import org.apache.directory.server.core.api.CoreSession; import org.apache.directory.server.core.api.DirectoryService; import org.apache.directory.server.core.api.partition.Partition; import org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory; import org.apache.directory.server.core.factory.DirectoryServiceFactory; import org.apache.directory.server.core.factory.PartitionFactory; import org.apache.directory.server.core.kerberos.KeyDerivationInterceptor; import org.apache.directory.server.kerberos.KerberosConfig; import org.apache.directory.server.kerberos.kdc.KdcServer; import org.apache.directory.server.ldap.LdapServer; import org.apache.directory.server.protocol.shared.transport.TcpTransport; import org.apache.directory.server.protocol.shared.transport.Transport; import org.apache.directory.server.protocol.shared.transport.UdpTransport; /** * Utility class to start up a test KDC backed by a directory server. * * It is better to start the server once instead of once per test but once running * the overhead is minimal. However a better solution may be to use the {@link Suite} * runner but we currently need to use the {@link DefaultServer} runner. * * TODO - May be able to add some lifecycle methods to DefaultServer to allow * for an extension. 
* * @author <a href="mailto:[email protected]">Darran Lofthouse</a> */ class KerberosKDCUtil { private static final boolean IS_IBM = System.getProperty("java.vendor").contains("IBM"); static final int LDAP_PORT = 11389; static final int KDC_PORT = 6088; private static final String DIRECTORY_NAME = "Test Service"; private static boolean initialised; private static Path workingDir; /* * LDAP Related */ private static DirectoryService directoryService; private static LdapServer ldapServer; /* * KDC Related */ private static KdcServer kdcServer; public static boolean startServer() throws Exception { if (initialised) { return false; } setupEnvironment(); startLdapServer(); startKDC(); initialised = true; return true; } private static void startLdapServer() throws Exception { createWorkingDir(); DirectoryServiceFactory dsf = new DefaultDirectoryServiceFactory(); dsf.init(DIRECTORY_NAME); directoryService = dsf.getDirectoryService(); directoryService.addLast(new KeyDerivationInterceptor()); // Derives the Kerberos keys for new entries. 
directoryService.getChangeLog().setEnabled(false); SchemaManager schemaManager = directoryService.getSchemaManager(); createPartition(dsf, schemaManager, "users", "ou=users,dc=undertow,dc=io"); CoreSession adminSession = directoryService.getAdminSession(); Map<String, String> mappings = Collections.singletonMap("hostname", DefaultServer.getDefaultServerAddress().getHostString()); processLdif(schemaManager, adminSession, "partition.ldif", mappings); processLdif(schemaManager, adminSession, "krbtgt.ldif", mappings); processLdif(schemaManager, adminSession, "user.ldif", mappings); processLdif(schemaManager, adminSession, "server.ldif", mappings); ldapServer = new LdapServer(); ldapServer.setServiceName("DefaultLDAP"); Transport ldap = new TcpTransport( "0.0.0.0", LDAP_PORT, 3, 5 ); ldapServer.addTransports(ldap); ldapServer.setDirectoryService(directoryService); ldapServer.start(); } private static void createPartition(final DirectoryServiceFactory dsf, final SchemaManager schemaManager, final String id, final String suffix) throws Exception { PartitionFactory pf = dsf.getPartitionFactory(); Partition p = pf.createPartition(schemaManager, id, suffix, 1000, workingDir.toFile()); pf.addIndex(p, "krb5PrincipalName", 10); p.initialize(); directoryService.addPartition(p); } private static void processLdif(final SchemaManager schemaManager, final CoreSession adminSession, final String ldifName, final Map<String, String> mappings) throws Exception { InputStream resourceInput = KerberosKDCUtil.class.getResourceAsStream("/ldif/" + ldifName); ByteArrayOutputStream baos = new ByteArrayOutputStream(resourceInput.available()); int current; while ((current = resourceInput.read()) != -1) { if (current == '$') { // Enter String replacement mode. 
int second = resourceInput.read(); if (second == '{') { ByteArrayOutputStream substitute = new ByteArrayOutputStream(); while ((current = resourceInput.read()) != -1 && current != '}') { substitute.write(current); } if (current == -1) { baos.write(current); baos.write(second); baos.write(substitute.toByteArray()); // Terminator never found. } String toReplace = new String(substitute.toByteArray(), StandardCharsets.UTF_8); if (mappings.containsKey(toReplace)) { baos.write(mappings.get(toReplace).getBytes()); } else { throw new IllegalArgumentException(String.format("No mapping found for '%s'", toReplace)); } } else { baos.write(current); baos.write(second); } } else { baos.write(current); } } ByteArrayInputStream ldifInput = new ByteArrayInputStream(baos.toByteArray()); LdifReader ldifReader = new LdifReader(ldifInput); for (LdifEntry ldifEntry : ldifReader) { adminSession.add(new DefaultEntry(schemaManager, ldifEntry.getEntry())); } ldifReader.close(); ldifInput.close(); } private static void startKDC() throws Exception { kdcServer = new KdcServer(); kdcServer.setServiceName("Test KDC"); kdcServer.setSearchBaseDn("ou=users,dc=undertow,dc=io"); KerberosConfig config = kdcServer.getConfig(); config.setServicePrincipal("krbtgt/[email protected]"); config.setPrimaryRealm("UNDERTOW.IO"); config.setPaEncTimestampRequired(false); UdpTransport udp = new UdpTransport("0.0.0.0", KDC_PORT); kdcServer.addTransports(udp); kdcServer.setDirectoryService(directoryService); kdcServer.start(); } private static void setupEnvironment() { final URL configPath = KerberosKDCUtil.class.getResource("/krb5.conf"); System.setProperty("java.security.krb5.conf", configPath.getFile()); } private static void createWorkingDir() throws IOException { if (workingDir == null) { workingDir = Paths.get(".", "target", "apacheds_working"); if (!Files.exists(workingDir)) { Files.createDirectories(workingDir); } } try(DirectoryStream<Path> stream = Files.newDirectoryStream(workingDir)) { for(Path child : 
stream) { Files.delete(child); } } } static Subject login(final String userName, final char[] password) throws LoginException { Subject theSubject = new Subject(); CallbackHandler cbh = new UsernamePasswordCBH(userName, password); LoginContext lc = new LoginContext("KDC", theSubject, cbh, createJaasConfiguration()); lc.login(); return theSubject; } private static Configuration createJaasConfiguration() { return new Configuration() { @Override public AppConfigurationEntry[] getAppConfigurationEntry(String name) { if (!"KDC".equals(name)) { throw new IllegalArgumentException("Unexpected name '" + name + "'"); } AppConfigurationEntry[] entries = new AppConfigurationEntry[1]; Map<String, Object> options = new HashMap<>(); options.put("debug", "true"); options.put("refreshKrb5Config", "true"); if (IS_IBM) { options.put("noAddress", "true"); options.put("credsType", "both"); entries[0] = new AppConfigurationEntry("com.ibm.security.auth.module.Krb5LoginModule", REQUIRED, options); } else { options.put("storeKey", "true"); options.put("isInitiator", "true"); entries[0] = new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule", REQUIRED, options); } return entries; } }; } private static class UsernamePasswordCBH implements CallbackHandler { /* * Note: We use CallbackHandler implementations like this in test cases as test cases need to run unattended, a true * CallbackHandler implementation should interact directly with the current user to prompt for the username and * password. * * i.e. In a client app NEVER prompt for these values in advance and provide them to a CallbackHandler like this. 
*/ private final String username; private final char[] password; private UsernamePasswordCBH(final String username, final char[] password) { this.username = username; this.password = password; } @Override public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException { for (Callback current : callbacks) { if (current instanceof NameCallback) { NameCallback ncb = (NameCallback) current; ncb.setName(username); } else if (current instanceof PasswordCallback) { PasswordCallback pcb = (PasswordCallback) current; pcb.setPassword(password); } else { throw new UnsupportedCallbackException(current); } } } } }
UNDERTOW-1765 Fix SPNEGO tests on Windows JDK11
core/src/test/java/io/undertow/server/security/KerberosKDCUtil.java
UNDERTOW-1765 Fix SPNEGO tests on Windows JDK11
<ide><path>ore/src/test/java/io/undertow/server/security/KerberosKDCUtil.java <ide> import java.io.ByteArrayOutputStream; <ide> import java.io.IOException; <ide> import java.io.InputStream; <add>import java.net.URISyntaxException; <ide> import java.net.URL; <ide> import java.nio.charset.StandardCharsets; <ide> import java.nio.file.DirectoryStream; <ide> <ide> private static void setupEnvironment() { <ide> final URL configPath = KerberosKDCUtil.class.getResource("/krb5.conf"); <del> System.setProperty("java.security.krb5.conf", configPath.getFile()); <add> try { <add> System.setProperty("java.security.krb5.conf", Paths.get(configPath.toURI()).normalize().toAbsolutePath().toString()); <add> } catch (URISyntaxException e) { <add> throw new RuntimeException(e); <add> } <ide> } <ide> <ide> private static void createWorkingDir() throws IOException {
Java
apache-2.0
3b33b1c1a1e40ad8bec42fe2b4b18f98cc36ee3c
0
cache2k/cache2k,cache2k/cache2k,cache2k/cache2k
package org.cache2k; /* * #%L * cache2k API * %% * Copyright (C) 2000 - 2019 headissue GmbH, Munich * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.EventListener; /** * A listener implemented by the cache client to get notification about the * completion of a load or prefetch operation. * * @author Jens Wilke * @see Cache#loadAll(Iterable, CacheOperationCompletionListener) * @see AdvancedKeyValueSource#prefetchAll(Iterable, CacheOperationCompletionListener) */ public interface CacheOperationCompletionListener extends EventListener { /** * Signals the completion of a {@link Cache#loadAll}, {@link Cache#reloadAll} or * {@link Cache#prefetchAll} operation. */ void onCompleted(); /** * The operation could not completed, because of an error. * * <p>In the current implementation, there is no condition which raises a call to this method. * Errors while loading a value, will be delayed and propagated when the respective key * is accessed. This is subject to the resilience configuration. */ void onException(Throwable exception); }
cache2k-api/src/main/java/org/cache2k/CacheOperationCompletionListener.java
package org.cache2k; /* * #%L * cache2k API * %% * Copyright (C) 2000 - 2019 headissue GmbH, Munich * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.EventListener; /** * A listener implemented by the cache client to get notification about the * completion of a load or prefetch operation. * * @author Jens Wilke * @see Cache#loadAll(Iterable, CacheOperationCompletionListener) * @see AdvancedKeyValueSource#prefetchAll(Iterable, CacheOperationCompletionListener) */ public interface CacheOperationCompletionListener extends EventListener { /** * Signals the completion of a {@link Cache#loadAll}, {@link Cache#reloadAll} or * {@link Cache#prefetchAll} operation. */ void onCompleted(); /** * The operation could not completed, because of an error. * * <p>In the current implementation, there is no condition which raises a call to this method. * Errors while loading a value, will be delayed and propagated when the respective key * is accessed. This is subject to the resilience configuration. * * <p>The method may be used in the future for some general failure condition during load. * Applications should propagate the exception properly and not only log it. */ void onException(Throwable exception); }
doc touch up
cache2k-api/src/main/java/org/cache2k/CacheOperationCompletionListener.java
doc touch up
<ide><path>ache2k-api/src/main/java/org/cache2k/CacheOperationCompletionListener.java <ide> * <p>In the current implementation, there is no condition which raises a call to this method. <ide> * Errors while loading a value, will be delayed and propagated when the respective key <ide> * is accessed. This is subject to the resilience configuration. <del> * <del> * <p>The method may be used in the future for some general failure condition during load. <del> * Applications should propagate the exception properly and not only log it. <ide> */ <ide> void onException(Throwable exception); <ide>
JavaScript
apache-2.0
34b75205e48005f1ae1ef5d5bdcb61f7ff16a608
0
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
// Class for showing a GUI for correcting transcripts of OCR batches // function OcrTranscript(insertinto_id, batch_id) { var m_batch_id = batch_id; var m_page = 0; var m_batchdata = null; // editor for each line var m_editor = null; // page data cache var m_pagedata = null; // alias 'this' for use from within callbacks var self = this; // UI bits it's useful to keep a reference to: var m_container = $("<div></div>") .addClass("widget"); // .css("width", "400px") // .css("height", "200px") // .draggable({ // stack: ".widget", // snap: "#workspace", // handle: "#batch_head", // }).resizable({ // minWidth: 300, // resize: function(e, ui) { // $(".transcript_lines") // .css("min-height", $(this).height() - 45); // }, // }).sortable({connectWith: ".widget"}); var m_header = $("<div></div>") .addClass("batch_head") .addClass("widget_header") .attr("id", "batch_head") .text("OCR Batch"); var m_pagename = $("<span></span>") .attr("id", "page_name"); var m_pagecount = $("<span></span>") .attr("id", "page_count"); var m_scroller = $("<div></div>") .attr("id", "scroll_container"); var m_pagediv = $("<div></div>") .addClass("waiting") .addClass("transcript_lines") .attr("id", "transcript_lines"); //.css("min-height", m_container.height() - 45); this.init = function() { self.buildUi(); self.refresh(); } this.refresh = function() { setData(); } this.setBatchId = function(batch_id) { m_batch_id = batch_id; self.refresh(); } this.setPage = function(page_index) { m_page = page_index; self.refreshPageData(); } // set a waiting spinner when doing something this.setWaiting = function(waiting) { m_pagediv.toggleClass("waiting", waiting); } this.page = function() { return m_page; } this.pageCount = function() { return m_batchdata.extras.task_count; } this.pageData = function() { return m_pagedata; } this.refresh = function() { $.ajax({ url: "/batch/results/" + m_batch_id + "/?start=" + m_page + "&end=" + (m_page + 1), dataType: "json", beforeSend: function(e) { self.setWaiting(true); 
}, complete: function(e) { self.setWaiting(false); }, success: function(data) { if (data == null) { alert("Unable to retrieve page data."); } else if (data.error) { alert(data.error); } m_batchdata = data[0]; self.onBatchLoad(); self.refreshPageData(); }, }); } this.refreshPageData = function() { $.ajax({ url: "/batch/results/" + m_batch_id + "/" + m_page + "/", data: {}, dataType: "json", beforeSend: function(e) { self.setWaiting(true); }, complete: function(e) { self.setWaiting(false); }, success: function(data) { if (data == null) { alert("Unable to retrieve page data."); } else if (data.error) { alert(data.error); } else if (data.length != 1) { alert("Data length error - should be 1 element long"); } else { m_pagedata = data[0]; self.onPageLoad(); setPageLines(data[0]); } }, }); self.onPageChange(); } this.buildUi = function() { m_container.append( m_header.append(m_pagecount).append(m_pagename)) .append(m_scroller.append(m_pagediv)) .appendTo("#" + insertinto_id); } /* * Events */ // m_batchdiv.bind("mouseup", function(event) { // var sel = window.getSelection(); // if (sel.toString() == "" || sel.rangeCount > 1) // return; // // var elem = $(sel.baseNode.parentElement); // $(document).bind("keydown.lineedit", function(e) { // if (e.which == 27) { // escape // alert("escape"); // $(document).unbind(".lineedit"); // return false; // } else if (e.which == 13) { // alert("return"); // $(document).unbind(".lineedit"); // return false; // } else { // alert(e.which); // } // }); // }); $(".ocr_line").live("mouseover mouseout", function(event) { if (event.type == "mouseover") { $(this).addClass("hover"); } else { $(this).removeClass("hover"); } }); $(".ocr_line").live("click", function(event) { if (m_editor == null) { m_editor = new OcrLineEditor(insertinto_id); m_editor.setElement(this, event); } else if (m_editor.element() && m_editor.element().get(0) === this) { // don't do anything - we're already editing it } else { m_editor.setElement(this, event); } 
self.onClickPosition($(this).data("bbox")); }); var setPageLines = function(data) { m_pagecount.text("Page " + (m_page + 1) + " of " + m_batchdata.extras.task_count); m_pagename.text(data.fields.page_name); m_pagediv.find(".ocr_line").remove(); m_pagediv.data("bbox", data.fields.results.box); $.each(data.fields.results.lines, function(linenum, line) { lspan = $("<span></span>") .text(line.text) .addClass("ocr_line") .data("bbox", line.box); m_pagediv.append(lspan); }); self.insertBreaks(); } // // Layout: Functions for arranging the lines in certain ways // TODO: Remove code dup between this and the ocr_page.js // file. // // parse bbox="0 20 500 300" into [0, 20, 500, 300] var parseBoundingBoxAttr = function(bbox) { var dims = [-1, -1, -1, -1]; if (bbox.match(boxpattern)) { dims[0] = parseInt(RegExp.$1); dims[1] = parseInt(RegExp.$2); dims[2] = parseInt(RegExp.$3); dims[3] = parseInt(RegExp.$4); } return dims; } // Fudgy function to insert line breaks (<br />) in places // where there are large gaps between lines. Significantly // improves the look of a block of OCR'd text. this.insertBreaks = function() { // insert space between each line $("<span></span>").text("\u00a0").insertBefore( m_pagediv.find(".ocr_line").first().nextAll()); var lastyh = -1; var lasth = -1; var lastitem; m_pagediv.removeClass("literal"); m_pagediv.children(".ocr_line").each(function(lnum, item) { var dims = $(item).data("bbox"); var y = dims[1]; // bbox x, y, w, h var h = dims[3]; if (dims[0] != -1) { $(item).attr("style", ""); $(item).children("br").remove(); if ((lastyh != -1 && lasth != -1) && (y - (h * 0.75) > lastyh || lasth < (h * 0.75))) { $(lastitem).after($("<br />")).after($("<br />")); } lastitem = item; lastyh = y + h; lasth = h; } }); m_pagediv.css("height", null); } var resizeToTarget = function(span, targetheight, targetwidth) { var iheight = span.height(); var iwidth = span.width(); var count = 0 if (iheight < targetheight && iheight) { //alert("grow! 
ih: " + iheight + " th: " + targetheight); while (iheight < targetheight && iwidth < targetwidth) { var cfs = parseInt(span.css("font-size").replace("px", "")); span = span.css("font-size", (cfs + 1) + "px"); iheight = span.height(); count++; if (count > 50) { //alert("growing too long: iheight: " + iheight + " th: " + targetheight); break; } } } else if (iheight > targetheight) { while (iheight && iheight > targetheight) { var cfs = parseInt(span.css("font-size").replace("px", "")); span = span.css("font-size", (cfs - 1) + "px"); iheight = span.height(); //alert("ih: " + iheight + " fs:" + cfs + " th: " + targetheight); //alert("iheight: " + iheight + " fs: " + span.css("font-size") + " cfs: " + (cfs - 1)); count++; if (count > 50) { //alert("shrinking too long: iheight: " + iheight + " th: " + targetheight); break; } } } return span.css("font-size"); } // Horrid function to try and position lines how they would be on // the source material. TODO: Make this not suck. this.positionByBounds = function() { var dims = m_pagediv.data("bbox"); var scale = (m_pagediv.outerWidth(true)) / dims[2]; var offx = m_pagediv.offset().left; var offy = m_pagediv.offset().top; m_pagediv.height(((dims[3] - dims[1]) * scale) + 20); var heights = []; var orderedheights = []; var orderedwidths = []; m_pagediv.addClass("literal"); m_pagediv.children(".ocr_line").each(function(position, item) { $(item).children("br").remove(); var lspan = $(item); var linedims = lspan.data("bbox"); var x = ((linedims[0] - dims[0]) * scale) + offx; var y = ((linedims[1] - dims[1]) * scale) + offy; var w = (linedims[2] * scale); var h = (linedims[3] * scale); lspan.css("top", y).css("left", x) .css("position", "absolute"); heights.push(h); orderedheights.push(h); orderedwidths.push(w); }); var stats = new Stats(heights); var medianfs = null; m_pagediv.children(".ocr_line").each(function(position, item) { //var lspan = $(item); //var iheight = lspan.height(); //var iwidth = lspan.width(); // if 'h' is within 
.25% of median, use the median instead var h = orderedheights[position]; var w = orderedwidths[position]; var ismedian = false; if ((h / stats.median - 1) < 0.25) { h = stats.median; ismedian = true; } // also clamp 'h' is min 3 h = Math.max(h, 3); if (medianfs != null && ismedian) { $(item).css("font-size", medianfs); } else { var fs = resizeToTarget($(item), h, w); if (medianfs == null && ismedian) { medianfs = fs; } } }); } } OcrTranscript.prototype.onBatchLoad = function() { } OcrTranscript.prototype.onPageLoad = function() { } OcrTranscript.prototype.onPageChange = function() { } OcrTranscript.prototype.onClickPosition = function(position) { }
static/js/ocr_transcript.js
// Class for showing a GUI for correcting transcripts of OCR batches // function OcrTranscript(insertinto_id, batch_id) { var m_batch_id = batch_id; var m_page = 0; var m_batchdata = null; // editor for each line var m_editor = null; // page data cache var m_pagedata = null; // alias 'this' for use from within callbacks var self = this; // UI bits it's useful to keep a reference to: var m_container = $("<div></div>") .addClass("widget"); // .css("width", "400px") // .css("height", "200px") // .draggable({ // stack: ".widget", // snap: "#workspace", // handle: "#batch_head", // }).resizable({ // minWidth: 300, // resize: function(e, ui) { // $(".transcript_lines") // .css("min-height", $(this).height() - 45); // }, // }).sortable({connectWith: ".widget"}); var m_header = $("<div></div>") .addClass("batch_head") .addClass("widget_header") .attr("id", "batch_head") .text("OCR Batch"); var m_pagename = $("<span></span>") .attr("id", "page_name"); var m_pagecount = $("<span></span>") .attr("id", "page_count"); var m_scroller = $("<div></div>") .attr("id", "scroll_container"); var m_pagediv = $("<div></div>") .addClass("waiting") .addClass("transcript_lines"); //.css("min-height", m_container.height() - 45); this.init = function() { self.buildUi(); self.refresh(); } this.refresh = function() { setData(); } this.setBatchId = function(batch_id) { m_batch_id = batch_id; self.refresh(); } this.setPage = function(page_index) { m_page = page_index; self.refreshPageData(); } // set a waiting spinner when doing something this.setWaiting = function(waiting) { m_pagediv.toggleClass("waiting", waiting); } this.page = function() { return m_page; } this.pageCount = function() { return m_batchdata.extras.task_count; } this.pageData = function() { return m_pagedata; } this.refresh = function() { $.ajax({ url: "/batch/results/" + m_batch_id + "/?start=" + m_page + "&end=" + (m_page + 1), dataType: "json", beforeSend: function(e) { self.setWaiting(true); }, complete: function(e) { 
self.setWaiting(false); }, success: function(data) { if (data == null) { alert("Unable to retrieve page data."); } else if (data.error) { alert(data.error); } m_batchdata = data[0]; self.onBatchLoad(); self.refreshPageData(); }, }); } this.refreshPageData = function() { $.ajax({ url: "/batch/results/" + m_batch_id + "/" + m_page + "/", data: {}, dataType: "json", beforeSend: function(e) { self.setWaiting(true); }, complete: function(e) { self.setWaiting(false); }, success: function(data) { if (data == null) { alert("Unable to retrieve page data."); } else if (data.error) { alert(data.error); } else if (data.length != 1) { alert("Data length error - should be 1 element long"); } else { m_pagedata = data[0]; self.onPageLoad(); setPageLines(data[0]); } }, }); self.onPageChange(); } this.buildUi = function() { m_container.append( m_header.append(m_pagecount).append(m_pagename)) .append(m_scroller.append(m_pagediv)) .appendTo("#" + insertinto_id); } /* * Events */ // m_batchdiv.bind("mouseup", function(event) { // var sel = window.getSelection(); // if (sel.toString() == "" || sel.rangeCount > 1) // return; // // var elem = $(sel.baseNode.parentElement); // $(document).bind("keydown.lineedit", function(e) { // if (e.which == 27) { // escape // alert("escape"); // $(document).unbind(".lineedit"); // return false; // } else if (e.which == 13) { // alert("return"); // $(document).unbind(".lineedit"); // return false; // } else { // alert(e.which); // } // }); // }); $(".ocr_line").live("mouseover mouseout", function(event) { if (event.type == "mouseover") { $(this).addClass("hover"); } else { $(this).removeClass("hover"); } }); $(".ocr_line").live("click", function(event) { if (m_editor == null) { m_editor = new OcrLineEditor(insertinto_id); m_editor.setElement(this, event); } else if (m_editor.element() && m_editor.element().get(0) === this) { // don't do anything - we're already editing it } else { m_editor.setElement(this, event); } 
self.onClickPosition($(this).data("bbox")); }); var setPageLines = function(data) { m_pagecount.text("Page " + (m_page + 1) + " of " + m_batchdata.extras.task_count); m_pagename.text(data.fields.page_name); m_pagediv.find(".ocr_line").remove(); m_pagediv.data("bbox", data.fields.results.box); $.each(data.fields.results.lines, function(linenum, line) { lspan = $("<span></span>") .text(line.text) .addClass("ocr_line") .data("bbox", line.box); m_pagediv.append(lspan); }); self.insertBreaks(); } // // Layout: Functions for arranging the lines in certain ways // TODO: Remove code dup between this and the ocr_page.js // file. // // parse bbox="0 20 500 300" into [0, 20, 500, 300] var parseBoundingBoxAttr = function(bbox) { var dims = [-1, -1, -1, -1]; if (bbox.match(boxpattern)) { dims[0] = parseInt(RegExp.$1); dims[1] = parseInt(RegExp.$2); dims[2] = parseInt(RegExp.$3); dims[3] = parseInt(RegExp.$4); } return dims; } // Fudgy function to insert line breaks (<br />) in places // where there are large gaps between lines. Significantly // improves the look of a block of OCR'd text. this.insertBreaks = function() { // insert space between each line $("<span></span>").text("\u00a0").insertBefore( m_pagediv.find(".ocr_line").first().nextAll()); var lastyh = -1; var lasth = -1; var lastitem; m_pagediv.removeClass("literal"); m_pagediv.children(".ocr_line").each(function(lnum, item) { var dims = $(item).data("bbox"); var y = dims[1]; // bbox x, y, w, h var h = dims[3]; if (dims[0] != -1) { $(item).attr("style", ""); $(item).children("br").remove(); if ((lastyh != -1 && lasth != -1) && (y - (h * 0.75) > lastyh || lasth < (h * 0.75))) { $(lastitem).append($("<br />")).append($("<br />")); } lastitem = item; lastyh = y + h; lasth = h; } }); m_pagediv.css("height", null); } var resizeToTarget = function(span, targetheight, targetwidth) { var iheight = span.height(); var iwidth = span.width(); var count = 0 if (iheight < targetheight && iheight) { //alert("grow! 
ih: " + iheight + " th: " + targetheight); while (iheight < targetheight && iwidth < targetwidth) { var cfs = parseInt(span.css("font-size").replace("px", "")); span = span.css("font-size", (cfs + 1) + "px"); iheight = span.height(); count++; if (count > 50) { //alert("growing too long: iheight: " + iheight + " th: " + targetheight); break; } } } else if (iheight > targetheight) { while (iheight && iheight > targetheight) { var cfs = parseInt(span.css("font-size").replace("px", "")); span = span.css("font-size", (cfs - 1) + "px"); iheight = span.height(); //alert("ih: " + iheight + " fs:" + cfs + " th: " + targetheight); //alert("iheight: " + iheight + " fs: " + span.css("font-size") + " cfs: " + (cfs - 1)); count++; if (count > 50) { //alert("shrinking too long: iheight: " + iheight + " th: " + targetheight); break; } } } return span.css("font-size"); } // Horrid function to try and position lines how they would be on // the source material. TODO: Make this not suck. this.positionByBounds = function() { var dims = m_pagediv.data("bbox"); var scale = (m_pagediv.outerWidth(true)) / dims[2]; var offx = m_pagediv.offset().left; var offy = m_pagediv.offset().top; m_pagediv.height(((dims[3] - dims[1]) * scale) + 20); var heights = []; var orderedheights = []; var orderedwidths = []; m_pagediv.addClass("literal"); m_pagediv.children(".ocr_line").each(function(position, item) { $(item).children("br").remove(); var lspan = $(item); var linedims = lspan.data("bbox"); var x = ((linedims[0] - dims[0]) * scale) + offx; var y = ((linedims[1] - dims[1]) * scale) + offy; var w = (linedims[2] * scale); var h = (linedims[3] * scale); lspan.css("top", y).css("left", x) .css("position", "absolute"); heights.push(h); orderedheights.push(h); orderedwidths.push(w); }); var stats = new Stats(heights); var medianfs = null; m_pagediv.children(".ocr_line").each(function(position, item) { //var lspan = $(item); //var iheight = lspan.height(); //var iwidth = lspan.width(); // if 'h' is within 
.25% of median, use the median instead var h = orderedheights[position]; var w = orderedwidths[position]; var ismedian = false; if ((h / stats.median - 1) < 0.25) { h = stats.median; ismedian = true; } // also clamp 'h' is min 3 h = Math.max(h, 3); if (medianfs != null && ismedian) { $(item).css("font-size", medianfs); } else { var fs = resizeToTarget($(item), h, w); if (medianfs == null && ismedian) { medianfs = fs; } } }); } } OcrTranscript.prototype.onBatchLoad = function() { } OcrTranscript.prototype.onPageLoad = function() { } OcrTranscript.prototype.onPageChange = function() { } OcrTranscript.prototype.onClickPosition = function(position) { }
Add an id to the line container and make the insertBreaks function add breaks as siblings, rather than children of the given lines
static/js/ocr_transcript.js
Add an id to the line container and make the insertBreaks function add breaks as siblings, rather than children of the given lines
<ide><path>tatic/js/ocr_transcript.js <ide> .attr("id", "scroll_container"); <ide> var m_pagediv = $("<div></div>") <ide> .addClass("waiting") <del> .addClass("transcript_lines"); <add> .addClass("transcript_lines") <add> .attr("id", "transcript_lines"); <ide> //.css("min-height", m_container.height() - 45); <ide> <ide> <ide> $(item).children("br").remove(); <ide> if ((lastyh != -1 && lasth != -1) <ide> && (y - (h * 0.75) > lastyh || lasth < (h * 0.75))) { <del> $(lastitem).append($("<br />")).append($("<br />")); <add> $(lastitem).after($("<br />")).after($("<br />")); <ide> } <ide> lastitem = item; <ide> lastyh = y + h;
Java
mit
d002502fbd1520f55935b357b166769983ab2c54
0
jonfryd/tifoon,jonfryd/tifoon,jonfryd/tifoon
package com.elixlogic.tifoon.plugin; import com.elixlogic.tifoon.domain.model.scanner.*; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.elixlogic.tifoon.plugin.executer.ExecutorPlugin; import com.elixlogic.tifoon.plugin.scanner.AbstractScannerPlugin; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.SystemUtils; import org.nmap4j.data.NMapRun; import org.nmap4j.data.host.Address; import org.nmap4j.data.nmaprun.Host; import org.nmap4j.parser.OnePassParser; import javax.annotation.Nullable; import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; @Slf4j public class NmapPortScannerPlugin extends AbstractScannerPlugin { private static final String PROVIDES = "nmap"; private static final Set<String> UNSUPPORTED_SCAN_TECHNIQUES = ImmutableSet.of("-sO"); private static final Set<String> ROOT_SCAN_TECHNIQUES = ImmutableSet.of("-sS", "-sA", "-sW", "-sM", "-sU", "-sN", "-sF", "-sX", "-sI", "-sY", "-sZ"); @Override public boolean supports(final String _s) { return PROVIDES.equals(_s); } @Override public NetworkResult scan(@NonNull final PortScannerJob _request, @NonNull final ExecutorPlugin _executorPlugin, @Nullable final String _additionalParameters) { try { final String scanResultFilename = String.format("nmap_scan_result_%s.xml", UUID.randomUUID().toString()); final String[] commandWithArguments = buildNmapCommandWithArguments(_request, scanResultFilename, _executorPlugin.getRunningAsUsername(), _additionalParameters); final byte result[] = _executorPlugin.dispatch("nmap", commandWithArguments, scanResultFilename); return mapXmlToPortScannerResult(_request, result); } catch (Exception _e) { log.error("Error running nmap", _e); return new NetworkResult(_request.getNetworkId(), false, Collections.EMPTY_MAP); 
} } private String[] buildNmapCommandWithArguments(@NonNull final PortScannerJob _request, @NonNull final String _scanResultFilename, @NonNull final String _runningAsUsername, @Nullable final String _additionalParameters) { // create port argument based on port ranges grouped by protocol final List<String> impliedScanTypes = new LinkedList<>(); final Map<Protocol, List<PortRange>> portRangesByProtocol = _request.getPortRanges() .stream() .collect(Collectors.groupingBy(PortRange::toProtocol)); final List<String> portRanges = new LinkedList<>(); boolean protocolsOtherThanTCP = false; for(Map.Entry<Protocol, List<PortRange>> entry : portRangesByProtocol.entrySet()) { final StringBuilder stringBuilder = new StringBuilder(); switch(entry.getKey()) { case UDP: stringBuilder.append("U:"); impliedScanTypes.add("-sU"); break; case TCP: stringBuilder.append("T:"); impliedScanTypes.add("-sS"); break; case SCTP: stringBuilder.append("S:"); impliedScanTypes.add("-sY"); break; default: throw new IllegalArgumentException(String.format("Unknown protocol: %s", entry.getKey())); } protocolsOtherThanTCP |= (entry.getKey() != Protocol.TCP); final String portRangesForProtocol = entry.getValue() .stream() .map(PortRange::toSingleOrIntervalString) .collect(Collectors.joining(",")); stringBuilder.append(portRangesForProtocol); portRanges.add(stringBuilder.toString()); } final String nmapPortRanges = portRanges .stream() .collect(Collectors.joining(",")); final List<String> targetHosts = _request.getHosts() .stream() .map(com.elixlogic.tifoon.domain.model.scanner.Host::getHostAddress) .collect(Collectors.toList()); final List<String> additionalParameters = Stream.of(Optional.ofNullable(_additionalParameters).orElse("") .split(" ")) .collect(Collectors.toList()); final List<String> argumentsList = Lists.newArrayList("-oX", _scanResultFilename, "-p", nmapPortRanges); // only add implied scan types if absolutely necessary, otherwise rely on default // (TCP connect for non-root, stealth for 
root) if (protocolsOtherThanTCP) { argumentsList.addAll(impliedScanTypes); } argumentsList.addAll(targetHosts); argumentsList.addAll(0, additionalParameters); // check for unsupported scan techniques final Set<String> unsupportedScanTypes = new HashSet<>(UNSUPPORTED_SCAN_TECHNIQUES); unsupportedScanTypes.retainAll(argumentsList); if (!unsupportedScanTypes.isEmpty()) { // warn about running non-root, since these scan types require root access on Unixes throw new IllegalArgumentException(String.format("Unsupported scan type(s) specified: %s", unsupportedScanTypes.toString())); } // identify root scan techniques final Set<String> rootScanTypes = new HashSet<>(ROOT_SCAN_TECHNIQUES); rootScanTypes.retainAll(argumentsList); if (!rootScanTypes.isEmpty() && SystemUtils.IS_OS_UNIX && !("root".equals(_runningAsUsername))) { // warn about running non-root, since these scan types require root access on Unixes log.warn("Scan requires root privileges. Please re-run Tifoon as root."); } return argumentsList.toArray(new String[argumentsList.size()]); } private NetworkResult mapXmlToPortScannerResult(@NonNull final PortScannerJob _request, @Nullable final byte[] _result) { if (_result == null) { return new NetworkResult(_request.getNetworkId(), false, Collections.EMPTY_MAP); } final Map<InetAddress, List<Port>> openPortsMap = Maps.newHashMap(); final OnePassParser opp = new OnePassParser(); final NMapRun nmapRun = opp.parse(new String(_result, StandardCharsets.UTF_8), OnePassParser.STRING_INPUT); if (nmapRun != null) { for(Host host : nmapRun.getHosts()) { final List<Port> openPorts = host.getPorts().getPorts().stream() .filter(port -> port.getState().getState().equals("open")) .map(port -> Port.from(mapProtocol(port.getProtocol()), (int) port.getPortId())) .collect(Collectors.toList()); if (openPorts.isEmpty()) { // nothing to see here, carry on, please... 
:) continue; } for(Address address : host.getAddresses()) { try { final InetAddress inetAddress = InetAddress.getByName(address.getAddr()); openPortsMap.put(inetAddress, openPorts); } catch (UnknownHostException _e) { // ignore } } } } return new NetworkResult(_request.getNetworkId(), true, openPortsMap); } private Protocol mapProtocol(@NonNull final String _protocol) { switch(_protocol) { case "tcp": return Protocol.TCP; case "udp": return Protocol.UDP; case "stcp": return Protocol.SCTP; default: throw new IllegalArgumentException(String.format("Unknown protocol: %s", _protocol)); } } }
tifoon-plugins/tifoon-nmap-scanner-plugin/src/main/java/com/elixlogic/tifoon/plugin/NmapPortScannerPlugin.java
package com.elixlogic.tifoon.plugin; import com.elixlogic.tifoon.domain.model.scanner.*; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.elixlogic.tifoon.plugin.executer.ExecutorPlugin; import com.elixlogic.tifoon.plugin.scanner.AbstractScannerPlugin; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.SystemUtils; import org.nmap4j.data.NMapRun; import org.nmap4j.data.host.Address; import org.nmap4j.data.nmaprun.Host; import org.nmap4j.parser.OnePassParser; import javax.annotation.Nullable; import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; @Slf4j public class NmapPortScannerPlugin extends AbstractScannerPlugin { private static final String PROVIDES = "nmap"; private static final Set<String> UNSUPPORTED_SCAN_TECHNIQUES = ImmutableSet.of("-sO"); private static final Set<String> ROOT_SCAN_TECHNIQUES = ImmutableSet.of("-sS", "-sA", "-sW", "-sM", "-sU", "-sN", "-sF", "-sX", "-sI", "-sY", "-sZ"); @Override public boolean supports(final String _s) { return PROVIDES.equals(_s); } @Override public NetworkResult scan(@NonNull final PortScannerJob _request, @NonNull final ExecutorPlugin _executorPlugin, @Nullable final String _additionalParameters) { try { final String scanResultFilename = String.format("nmap_scan_result_%s.xml", UUID.randomUUID().toString()); final String[] commandWithArguments = buildNmapCommandWithArguments(_request, scanResultFilename, _executorPlugin.getRunningAsUsername(), _additionalParameters); final byte result[] = _executorPlugin.dispatch("nmap", commandWithArguments, scanResultFilename); return mapXmlToPortScannerResult(_request, result); } catch (Exception _e) { log.error("Error running nmap", _e); return new NetworkResult(_request.getNetworkId(), false, Collections.EMPTY_MAP); 
} } private String[] buildNmapCommandWithArguments(@NonNull final PortScannerJob _request, @NonNull final String _scanResultFilename, @NonNull final String _runningAsUsername, @Nullable final String _additionalParameters) { // create port argument based on port ranges grouped by protocol final List<String> impliedScanTypes = new LinkedList<>(); final Map<Protocol, List<PortRange>> portRangesByProtocol = _request.getPortRanges() .stream() .collect(Collectors.groupingBy(PortRange::toProtocol)); final List<String> portRanges = new LinkedList<>(); boolean protocolsOtherThanTCP = false; for(Map.Entry<Protocol, List<PortRange>> entry : portRangesByProtocol.entrySet()) { final StringBuilder stringBuilder = new StringBuilder(); switch(entry.getKey()) { case UDP: stringBuilder.append("U:"); impliedScanTypes.add("-sU"); break; case TCP: stringBuilder.append("T:"); impliedScanTypes.add("-sS"); break; case SCTP: stringBuilder.append("S:"); impliedScanTypes.add("-sY"); break; default: throw new IllegalArgumentException(String.format("Unknown protocol: %s", entry.getKey())); } protocolsOtherThanTCP |= (entry.getKey() != Protocol.TCP); final String portRangesForProtocol = entry.getValue() .stream() .map(PortRange::toSingleOrIntervalString) .collect(Collectors.joining(",")); stringBuilder.append(portRangesForProtocol); portRanges.add(stringBuilder.toString()); } final String nmapPortRanges = portRanges .stream() .collect(Collectors.joining(",")); final List<String> targetHosts = _request.getHosts() .stream() .map(com.elixlogic.tifoon.domain.model.scanner.Host::getHostAddress) .collect(Collectors.toList()); final List<String> additionalParameters = Stream.of(Optional.ofNullable(_additionalParameters).orElse("") .split(" ")) .collect(Collectors.toList()); final List<String> argumentsList = Lists.newArrayList("-oX", _scanResultFilename, "-p", nmapPortRanges); // only add implied scan types if absolutely necessary, otherwise rely on default // (TCP connect for non-root, stealth for 
root) if (protocolsOtherThanTCP) { argumentsList.addAll(impliedScanTypes); } argumentsList.addAll(targetHosts); argumentsList.addAll(0, additionalParameters); // check for unsupported scan techniques final Set<String> unsupportedScanTypes = new HashSet<>(UNSUPPORTED_SCAN_TECHNIQUES); unsupportedScanTypes.retainAll(argumentsList); if (!unsupportedScanTypes.isEmpty()) { // warn about running non-root, since these scan types require root access on Unixes throw new IllegalArgumentException(String.format("Unsupported scan type(s) specified: %s", unsupportedScanTypes.toString())); } // identify root scan techniques final Set<String> rootScanTypes = new HashSet<>(ROOT_SCAN_TECHNIQUES); rootScanTypes.retainAll(argumentsList); if (!rootScanTypes.isEmpty() && SystemUtils.IS_OS_UNIX && !("root".equals(_runningAsUsername))) { // warn about running non-root, since these scan types require root access on Unixes log.warn("Scan types require root privileges. Please re-run Tifoon as root."); } return argumentsList.toArray(new String[argumentsList.size()]); } private NetworkResult mapXmlToPortScannerResult(@NonNull final PortScannerJob _request, @Nullable final byte[] _result) { if (_result == null) { return new NetworkResult(_request.getNetworkId(), false, Collections.EMPTY_MAP); } final Map<InetAddress, List<Port>> openPortsMap = Maps.newHashMap(); final OnePassParser opp = new OnePassParser(); final NMapRun nmapRun = opp.parse(new String(_result, StandardCharsets.UTF_8), OnePassParser.STRING_INPUT); if (nmapRun != null) { for(Host host : nmapRun.getHosts()) { final List<Port> openPorts = host.getPorts().getPorts().stream() .filter(port -> port.getState().getState().equals("open")) .map(port -> Port.from(mapProtocol(port.getProtocol()), (int) port.getPortId())) .collect(Collectors.toList()); if (openPorts.isEmpty()) { // nothing to see here, carry on, please... 
:) continue; } for(Address address : host.getAddresses()) { try { final InetAddress inetAddress = InetAddress.getByName(address.getAddr()); openPortsMap.put(inetAddress, openPorts); } catch (UnknownHostException _e) { // ignore } } } } return new NetworkResult(_request.getNetworkId(), true, openPortsMap); } private Protocol mapProtocol(@NonNull final String _protocol) { switch(_protocol) { case "tcp": return Protocol.TCP; case "udp": return Protocol.UDP; case "stcp": return Protocol.SCTP; default: throw new IllegalArgumentException(String.format("Unknown protocol: %s", _protocol)); } } }
Better wording
tifoon-plugins/tifoon-nmap-scanner-plugin/src/main/java/com/elixlogic/tifoon/plugin/NmapPortScannerPlugin.java
Better wording
<ide><path>ifoon-plugins/tifoon-nmap-scanner-plugin/src/main/java/com/elixlogic/tifoon/plugin/NmapPortScannerPlugin.java <ide> <ide> if (!rootScanTypes.isEmpty() && SystemUtils.IS_OS_UNIX && !("root".equals(_runningAsUsername))) { <ide> // warn about running non-root, since these scan types require root access on Unixes <del> log.warn("Scan types require root privileges. Please re-run Tifoon as root."); <add> log.warn("Scan requires root privileges. Please re-run Tifoon as root."); <ide> } <ide> <ide> return argumentsList.toArray(new String[argumentsList.size()]);
Java
mit
d96ded53074ccadb0a4c92d501b4c0be89e02ad2
0
nh13/picard,broadinstitute/picard,nh13/picard,alecw/picard,broadinstitute/picard,alecw/picard,broadinstitute/picard,broadinstitute/picard,alecw/picard,nh13/picard,nh13/picard,alecw/picard,broadinstitute/picard
/* * The MIT License * * Copyright (c) 2009 The Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package picard.sam; import htsjdk.samtools.SamReader; import htsjdk.samtools.SamReaderFactory; import htsjdk.samtools.util.CloserUtil; import org.testng.Assert; import org.testng.annotations.Test; import java.io.File; import java.util.Iterator; /** * Basic positive and negative tests for SplitSamByLibrary command-line program * * @author [email protected] */ public class SplitSamByLibraryTest { @Test public void testNoLibrarySpecified() { SplitSamByLibrary splitter = new SplitSamByLibrary(); splitter.INPUT = new File("testdata/picard/sam/invalid_coord_sort_order.sam"); Assert.assertEquals(splitter.doWork(), SplitSamByLibrary.NO_LIBRARIES_SPECIFIED_IN_HEADER, "SAM file with no libraries should failed but didn't."); } @Test public void basicPositiveTest() { SplitSamByLibrary splitter = new SplitSamByLibrary(); splitter.INPUT = new File("testdata/picard/sam/split_test.sam"); Assert.assertEquals(splitter.doWork(), 0, "SAM file split should have succeeded but didn't."); File f = new File("unknown.sam"); Assert.assertTrue(f.exists(), "unknown.sam should exist but doesn't"); Assert.assertEquals(countReads(f), 2, "unknown.sam has the wrong number of reads"); f.delete(); f = new File("lib-1.sam"); Assert.assertTrue(f.exists(), "lib-1.sam should exist but doesn't"); Assert.assertEquals(countReads(f), 6, "lib-1.sam has the wrong number of reads"); f.delete(); f = new File("lib-2.sam"); Assert.assertFalse(f.exists(), "lib-2.sam should not exist but does"); if (f.exists()) f.delete(); f = new File("lib-3.sam"); Assert.assertTrue(f.exists(), "lib-3.sam should exist but doesn't"); Assert.assertEquals(countReads(f), 2, "lib-3.sam has the wrong number of reads"); f.delete(); } @Test public void testNoUnknownFile() { SplitSamByLibrary splitter = new SplitSamByLibrary(); splitter.INPUT = new File("testdata/picard/sam/split_test2.sam"); Assert.assertEquals(splitter.doWork(), 0, "SAM file split should have succeeded but didn't."); // The unknown file should exist and have two reads 
File f = new File("unknown.sam"); Assert.assertFalse(f.exists(), "unknown.sam should not exist but does"); if (f.exists()) f.delete(); f = new File("lib-1.sam"); Assert.assertTrue(f.exists(), "lib-1.sam should exist but doesn't"); Assert.assertEquals(countReads(f), 4, "lib-1.sam has the wrong number of reads"); f.delete(); f = new File("lib-3.sam"); Assert.assertTrue(f.exists(), "lib-3.sam should exist but doesn't"); Assert.assertEquals(countReads(f), 2, "lib-3.sam has the wrong number of reads"); f.delete(); } private int countReads(File samFile) { SamReader reader = SamReaderFactory.makeDefault().open(samFile); int count = 0; for (Iterator it = reader.iterator(); it.hasNext(); ) { it.next(); count++; } CloserUtil.close(reader); return count; } }
src/test/java/picard/sam/SplitSamByLibraryTest.java
/* * The MIT License * * Copyright (c) 2009 The Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package picard.sam; import htsjdk.samtools.SamReader; import htsjdk.samtools.SamReaderFactory; import htsjdk.samtools.util.CloserUtil; import org.testng.Assert; import org.testng.annotations.Test; import java.io.File; import java.util.Iterator; /** * Basic positive and negative tests for SplitSamByLibrary command-line program * * @author [email protected] */ public class SplitSamByLibraryTest { @Test public void testNoLibrarySpecified() { SplitSamByLibrary splitter = new SplitSamByLibrary(); splitter.INPUT = new File("testdata/picard/sam/invalid_coord_sort_order.sam"); Assert.assertEquals(splitter.doWork(), SplitSamByLibrary.NO_LIBRARIES_SPECIFIED_IN_HEADER, "SAM file with no libraries should failed but didn't."); } @Test public void basicPositiveTest() { SplitSamByLibrary splitter = new SplitSamByLibrary(); splitter.INPUT = new File("testdata/picard/sam/split_test.sam"); Assert.assertEquals(splitter.doWork(), 0, "SAM file split should have succeeded but didn't."); File f = new File("unknown.sam"); Assert.assertTrue(f.exists(), "uknown.sam should exist but doesn't"); Assert.assertEquals(countReads(f), 2, "unknown.sam has the wrong number of reads"); f.delete(); f = new File("lib-1.sam"); Assert.assertTrue(f.exists(), "lib-1.sam should exist but doesn't"); Assert.assertEquals(countReads(f), 6, "lib-1.sam has the wrong number of reads"); f.delete(); f = new File("lib-2.sam"); Assert.assertFalse(f.exists(), "lib-2.sam should not exist but does"); if (f.exists()) f.delete(); f = new File("lib-3.sam"); Assert.assertTrue(f.exists(), "lib-3.sam should exist but doesn't"); Assert.assertEquals(countReads(f), 2, "lib-3.sam has the wrong number of reads"); f.delete(); } @Test public void testNoUnknownFile() { SplitSamByLibrary splitter = new SplitSamByLibrary(); splitter.INPUT = new File("testdata/picard/sam/split_test2.sam"); Assert.assertEquals(splitter.doWork(), 0, "SAM file split should have succeeded but didn't."); // The unknown file should exist and have two reads 
File f = new File("unknown.sam"); Assert.assertFalse(f.exists(), "uknown.sam should not exist but does"); if (f.exists()) f.delete(); f = new File("lib-1.sam"); Assert.assertTrue(f.exists(), "lib-1.sam should exist but doesn't"); Assert.assertEquals(countReads(f), 4, "lib-1.sam has the wrong number of reads"); f.delete(); f = new File("lib-3.sam"); Assert.assertTrue(f.exists(), "lib-3.sam should exist but doesn't"); Assert.assertEquals(countReads(f), 2, "lib-3.sam has the wrong number of reads"); f.delete(); } private int countReads(File samFile) { SamReader reader = SamReaderFactory.makeDefault().open(samFile); int count = 0; for (Iterator it = reader.iterator(); it.hasNext(); ) { it.next(); count++; } CloserUtil.close(reader); return count; } }
Typo in SplitSamByLibraryTest
src/test/java/picard/sam/SplitSamByLibraryTest.java
Typo in SplitSamByLibraryTest
<ide><path>rc/test/java/picard/sam/SplitSamByLibraryTest.java <ide> Assert.assertEquals(splitter.doWork(), 0, "SAM file split should have succeeded but didn't."); <ide> <ide> File f = new File("unknown.sam"); <del> Assert.assertTrue(f.exists(), "uknown.sam should exist but doesn't"); <add> Assert.assertTrue(f.exists(), "unknown.sam should exist but doesn't"); <ide> Assert.assertEquals(countReads(f), 2, "unknown.sam has the wrong number of reads"); <ide> f.delete(); <ide> <ide> <ide> // The unknown file should exist and have two reads <ide> File f = new File("unknown.sam"); <del> Assert.assertFalse(f.exists(), "uknown.sam should not exist but does"); <add> Assert.assertFalse(f.exists(), "unknown.sam should not exist but does"); <ide> if (f.exists()) f.delete(); <ide> <ide> f = new File("lib-1.sam");
Java
apache-2.0
09192330c194f30aeb7f57a8f55fefc0be0354ff
0
omerio/cloudex,omerio/cloudex-google
/** * The contents of this file may be used under the terms of the Apache License, Version 2.0 * in which case, the provisions of the Apache License Version 2.0 are applicable instead of those above. * * Copyright 2014, Ecarf.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.cloudex.cloud.impl.google.compute; import io.cloudex.framework.utils.ObjectUtils; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * @author Omer Dawelbeit (omerio) * */ public class GoogleMetaData { private final static Log log = LogFactory.getLog(GoogleMetaData.class); // metadata server urls public static final String METADATA_SERVER_URL = "http://metadata.google.internal/computeMetadata/v1/"; public static final String TOKEN_PATH = "instance/service-accounts/default/token"; //private static final String SERVICE_ACCOUNT_PATH = "service-accounts/default/?recursive=true"; public static final String INSTANCE_ALL_PATH = "instance/?recursive=true"; //private static final String PROJECT_ALL_PATH = "project/?recursive=true"; public static final String PROJECT_ID_PATH = "project/project-id"; public static final String ATTRIBUTES_PATH = "instance/attributes/?recursive=true"; // without timeout it doesn't return public static final String WAIT_FOR_CHANGE = 
"&wait_for_change=true&timeout_sec=360"; // scopes public static final String DATASTORE_SCOPE = "https://www.googleapis.com/auth/datastore"; public static final String RESOURCE_BASE_URL = "https://www.googleapis.com/compute/v1/projects/"; public static final String NETWORK = "/global/networks/"; public static final String ZONES = "/zones/"; public static final String MACHINE_TYPES = "/machineTypes/"; public static final String DISK_TYPES = "/diskTypes/"; public static final String CENTO_IMAGE = "/centos-cloud/global/images/centos-6-v20140318"; public static final String ACCESS_TOKEN = "access_token"; public static final String EXPIRES_IN = "expires_in"; public static final String PROJECT_ID = "projectId"; public static final String ID = "id"; public static final String HOSTNAME = "hostname"; public static final String ZONE = "zone"; public static final String ATTRIBUTES = "attributes"; public static final String ITEMS = "items"; public static final String FINGER_PRINT = "fingerprint"; public static final String DONE = "DONE"; public static final String EMAIL = "email"; public static final String SCOPES = "scopes"; public static final String SERVICE_ACCOUNTS = "serviceAccounts"; public static final String DEFAULT = "default"; public static final String IMAGE = "image"; public static final String PERSISTENT = "PERSISTENT"; public static final String MIGRATE = "MIGRATE"; public static final String EXT_NAT = "External NAT"; public static final String ONE_TO_ONE_NAT = "ONE_TO_ONE_NAT"; public static final String STARTUP_SCRIPT = "startup-script"; public static final String CLOUD_STORAGE_PREFIX = "gs://"; public static final String NOT_FOUND = "404 Not Found"; // BigQuery create/write disposition public static final String CREATE_NEVER = "CREATE_NEVER"; public static final String CREATE_IF_NEEDED = "CREATE_IF_NEEDED"; public static final String WRITE_APPEND = "WRITE_APPEND"; public static final String TYPE_STRING = "STRING"; public static final String TYPE_INTEGER = 
"INTEGER"; // API error reasons public static final String RATE_LIMIT_EXCEEDED = "rateLimitExceeded"; public static final String QUOTA_EXCEEDED = "quotaExceeded"; /** * Call the metadata server, this returns details for the current instance not for * different instances. In order to retrieve the meta data of different instances * we just use the compute api, see getInstance * @param path * @return * @throws IOException */ public static String getMetaData(String path) throws IOException { log.debug("Retrieving metadata from server, path: " + path); URL metadata = new URL(METADATA_SERVER_URL + path); HttpURLConnection con = (HttpURLConnection) metadata.openConnection(); // optional default is GET //con.setRequestMethod("GET"); //add request header con.setRequestProperty("Metadata-Flavor", "Google"); int responseCode = con.getResponseCode(); StringBuilder response = new StringBuilder(); if(responseCode == 200) { try(BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) { String inputLine; while ((inputLine = in.readLine()) != null) { response.append(inputLine); } } } else { String msg = "Metadata server responded with status code: " + responseCode; log.error(msg); throw new IOException(msg); } log.debug("Successfully retrieved metadata from server"); return response.toString(); } /** * Return the metadata as a map * @param path * @return */ public static Map<String, Object> getMetaDataAsMap(String path) throws IOException { String metaData = getMetaData(path); return ObjectUtils.jsonToMap(metaData); } }
src/main/java/io/cloudex/cloud/impl/google/compute/GoogleMetaData.java
/** * The contents of this file may be used under the terms of the Apache License, Version 2.0 * in which case, the provisions of the Apache License Version 2.0 are applicable instead of those above. * * Copyright 2014, Ecarf.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.cloudex.cloud.impl.google.compute; import io.cloudex.framework.utils.ObjectUtils; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * @author Omer Dawelbeit (omerio) * */ public class GoogleMetaData { private final static Log log = LogFactory.getLog(GoogleMetaData.class); // metadata server urls public static final String METADATA_SERVER_URL = "http://metadata.google.internal/computeMetadata/v1/"; public static final String TOKEN_PATH = "instance/service-accounts/default/token"; //private static final String SERVICE_ACCOUNT_PATH = "service-accounts/default/?recursive=true"; public static final String INSTANCE_ALL_PATH = "instance/?recursive=true"; //private static final String PROJECT_ALL_PATH = "project/?recursive=true"; public static final String PROJECT_ID_PATH = "project/project-id"; public static final String ATTRIBUTES_PATH = "instance/attributes/?recursive=true"; // without timeout it doesn't return public static final String WAIT_FOR_CHANGE = 
"&wait_for_change=true&timeout_sec=360"; // scopes public static final String DATASTORE_SCOPE = "https://www.googleapis.com/auth/datastore"; public static final String RESOURCE_BASE_URL = "https://www.googleapis.com/compute/v1/projects/"; public static final String NETWORK = "/global/networks/"; public static final String ZONES = "/zones/"; public static final String MACHINE_TYPES = "/machineTypes/"; public static final String DISK_TYPES = "/diskTypes/"; public static final String CENTO_IMAGE = "/centos-cloud/global/images/centos-6-v20140318"; public static final String ACCESS_TOKEN = "access_token"; public static final String EXPIRES_IN = "expires_in"; public static final String PROJECT_ID = "projectId"; public static final String ID = "id"; public static final String HOSTNAME = "hostname"; public static final String ZONE = "zone"; public static final String ATTRIBUTES = "attributes"; public static final String ITEMS = "items"; public static final String FINGER_PRINT = "fingerprint"; public static final String DONE = "DONE"; public static final String EMAIL = "email"; public static final String SCOPES = "scopes"; public static final String SERVICE_ACCOUNTS = "serviceAccounts"; public static final String DEFAULT = "default"; public static final String IMAGE = "image"; public static final String PERSISTENT = "PERSISTENT"; public static final String MIGRATE = "MIGRATE"; public static final String EXT_NAT = "External NAT"; public static final String ONE_TO_ONE_NAT = "ONE_TO_ONE_NAT"; public static final String STARTUP_SCRIPT = "startup-script"; public static final String CLOUD_STORAGE_PREFIX = "gs://"; public static final String NOT_FOUND = "404 Not Found"; // BigQuery create/write disposition public static final String CREATE_NEVER = "CREATE_NEVER"; public static final String CREATE_IF_NEEDED = "CREATE_IF_NEEDED"; public static final String WRITE_APPEND = "WRITE_APPEND"; public static final String TYPE_STRING = "STRING"; // API error reasons public static final 
String RATE_LIMIT_EXCEEDED = "rateLimitExceeded"; public static final String QUOTA_EXCEEDED = "quotaExceeded"; /** * Call the metadata server, this returns details for the current instance not for * different instances. In order to retrieve the meta data of different instances * we just use the compute api, see getInstance * @param path * @return * @throws IOException */ public static String getMetaData(String path) throws IOException { log.debug("Retrieving metadata from server, path: " + path); URL metadata = new URL(METADATA_SERVER_URL + path); HttpURLConnection con = (HttpURLConnection) metadata.openConnection(); // optional default is GET //con.setRequestMethod("GET"); //add request header con.setRequestProperty("Metadata-Flavor", "Google"); int responseCode = con.getResponseCode(); StringBuilder response = new StringBuilder(); if(responseCode == 200) { try(BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) { String inputLine; while ((inputLine = in.readLine()) != null) { response.append(inputLine); } } } else { String msg = "Metadata server responded with status code: " + responseCode; log.error(msg); throw new IOException(msg); } log.debug("Successfully retrieved metadata from server"); return response.toString(); } /** * Return the metadata as a map * @param path * @return */ public static Map<String, Object> getMetaDataAsMap(String path) throws IOException { String metaData = getMetaData(path); return ObjectUtils.jsonToMap(metaData); } }
type string for bigquery columns
src/main/java/io/cloudex/cloud/impl/google/compute/GoogleMetaData.java
type string for bigquery columns
<ide><path>rc/main/java/io/cloudex/cloud/impl/google/compute/GoogleMetaData.java <ide> public static final String WRITE_APPEND = "WRITE_APPEND"; <ide> <ide> public static final String TYPE_STRING = "STRING"; <add> public static final String TYPE_INTEGER = "INTEGER"; <ide> <ide> // API error reasons <ide> public static final String RATE_LIMIT_EXCEEDED = "rateLimitExceeded";
Java
apache-2.0
0ea56309c5d64b103c9aa4c58ba165a5813c4ed0
0
apache/empire-db,apache/empire-db,apache/empire-db,apache/empire-db
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.empire.jsf2.controls; import java.io.IOException; import java.sql.Timestamp; import java.text.DateFormat; import java.text.NumberFormat; import java.text.ParseException; import java.util.Currency; import java.util.Date; import java.util.List; import java.util.Locale; import javax.faces.component.UIComponent; import javax.faces.component.html.HtmlInputText; import javax.faces.context.FacesContext; import javax.faces.context.ResponseWriter; import org.apache.empire.commons.ObjectUtils; import org.apache.empire.commons.Options; import org.apache.empire.commons.StringUtils; import org.apache.empire.data.Column; import org.apache.empire.data.DataType; import org.apache.empire.db.DBColumn; import org.apache.empire.exceptions.InternalException; import org.apache.empire.exceptions.UnexpectedReturnValueException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TextInputControl extends InputControl { private static final Logger log = LoggerFactory.getLogger(TextInputControl.class); public static final String NAME = "text"; public static final String FORMAT_UNIT = "unit:"; public static final String FORMAT_UNIT_ATTRIBUTE = "format:unit"; public static final String 
DATE_FORMAT = "date-format:"; public static final String DATE_FORMAT_ATTRIBUTE = "format:date"; private Class<? extends javax.faces.component.html.HtmlInputText> inputComponentClass; public TextInputControl(String name, Class<? extends HtmlInputText> inputComponentClass) { super(name); this.inputComponentClass = inputComponentClass; } public TextInputControl(String name) { this(name, javax.faces.component.html.HtmlInputText.class); } public TextInputControl() { this(NAME, javax.faces.component.html.HtmlInputText.class); } @Override protected void createInputComponents(UIComponent parent, InputInfo ii, FacesContext context, List<UIComponent> compList) { HtmlInputText input; if (compList.size()==0) { try { input = inputComponentClass.newInstance(); } catch (InstantiationException e1) { throw new InternalException(e1); } catch (IllegalAccessException e2) { throw new InternalException(e2); } // once copyAttributes(parent, ii, input); // language input.setLang(ii.getLocale().getLanguage()); // maxlength int maxLength = getMaxInputLength(ii.getColumn()); if (maxLength>0) input.setMaxlength(maxLength); // add compList.add(input); } else { // check type UIComponent comp = compList.get(0); if (!(comp instanceof HtmlInputText)) throw new UnexpectedReturnValueException(comp.getClass().getName(), "compList.get"); // cast input = (HtmlInputText)comp; } // disabled Object dis = ii.getAttributeEx("disabled"); if (dis!=null) input.setDisabled(ObjectUtils.getBoolean(dis)); // field-readOnly if (ObjectUtils.getBoolean(dis)==false) input.setReadonly(ii.isFieldReadOnly()); // style addRemoveDisabledStyle(input, (input.isDisabled() || input.isReadonly())); addRemoveInvalidStyle(input, ii.hasError()); // set value setInputValue(input, ii); } // ------- parsing ------- @Override protected Object parseInputValue(String value, InputInfo ii) { // Trim if (hasFormatOption(ii, "notrim")==false) value = value.trim(); // Check Data Type Column column = ii.getColumn(); DataType type = 
column.getDataType(); if (type.isText()) return value; // Check other types if (type==DataType.INTEGER) { NumberFormat nf = NumberFormat.getIntegerInstance(ii.getLocale()); return parseNumber(value, nf); } if (type==DataType.DECIMAL || type==DataType.FLOAT) { NumberFormat nf = NumberFormat.getNumberInstance(ii.getLocale()); return parseNumber(value, nf); } if (type==DataType.DATE || type==DataType.DATETIME) { return parseDate(value, getDateFormat(column.getDataType(), ii, column)); } if (type==DataType.BOOL) { return ObjectUtils.getBoolean(value); } if (type==DataType.AUTOINC) { // autoinc log.error("Autoinc-value cannot be changed."); return null; } // Default return value; } // ------- validation ------- /* @Override protected Object validate(Object o, Locale locale, Column column, String s) { if (o instanceof Number) { Object min = column.getAttribute(InputControl.MINVALUE_ATTRIBUTE); Object max = column.getAttribute(InputControl.MAXVALUE_ATTRIBUTE); if (min!=null && max!=null) { Number n = (Number)o; if (n.intValue()<ObjectUtils.getInteger(min) || n.intValue()>ObjectUtils.getInteger(max)) { // Out of Range return error(WebErrors.InputValueOutOfRange, new String[] { min.toString(), max.toString() }, s); } } } return o; } */ // ------- formatting ------- @Override protected String formatValue(Object value, ValueInfo vi) { // Lookup and Print value Options options = vi.getOptions(); if (options != null && !options.isEmpty()) { // Check for Options String text = options.get(value); if (text != null) return vi.getText(text); // Error log.error("The element '" + String.valueOf(value) + "' is not part of the supplied option list."); } // Check Value if (value == null) { // Try to use default value Object nullValue = getFormatOption(vi, FORMAT_NULL, FORMAT_NULL_ATTRIBUTE); if (nullValue!=null) return formatValue(nullValue, vi); // Empty String return ""; } // Format Value Column column = vi.getColumn(); DataType dataType = getValueType(value, (column != null) ? 
column.getDataType() : DataType.UNKNOWN); if (dataType == DataType.TEXT || dataType == DataType.UNKNOWN) { // String String s = String.valueOf(value); if (hasFormatOption(vi, "noencode")) return s; // Encoded text return escapeHTML(s); } if (dataType == DataType.INTEGER || dataType == DataType.AUTOINC) { // Integer NumberFormat nf = NumberFormat.getIntegerInstance(vi.getLocale()); nf.setGroupingUsed(false); return nf.format(value); } if (dataType == DataType.DECIMAL || dataType == DataType.FLOAT) { // Dezimal oder Double NumberFormat nf = getNumberFormat(dataType, vi.getLocale(), column); return nf.format(value); } if (dataType == DataType.DATE || dataType == DataType.DATETIME) { // Date or DateTime if (dataType== DataType.DATETIME && hasFormatOption(vi, "notime")) dataType = DataType.DATE; // Now format the date according to the user's locale DateFormat df = getDateFormat(dataType, vi, column); return df.format(value); } /* * if (dataType == DBDataType.BOOL) { * } */ // Convert to String return escapeHTML(String.valueOf(value)); } /* protected String formatValue(ValueInfo vi, boolean appendUnit) { String text = super.formatValue(vi); if (appendUnit && text!=null && text.length()>0) { String unit = getUnitString(vi); if (unit != null) { // Append unit text += " " + unit; } } return text; } */ @Override protected Object formatInputValue(Object value, InputInfo ii) { if (value == null) return ""; // Check options Options options = ii.getOptions(); if (options != null && !options.isEmpty()) return value; // Format return formatValue(value, ii); } // ------- render ------- @Override public void renderValue(ValueInfo vi, ResponseWriter writer) throws IOException { String text = formatValue(vi); if (StringUtils.isEmpty(text)) { // nothing writer.append("&nbsp;"); return; } // append text writer.append(text); // unit? 
String unit = getUnitString(vi); if (StringUtils.isNotEmpty(unit)) { // append unit writer.append(" "); writer.append(unit); } } /* @Override public void renderInput(Response writer, ControlInfo ci) { HtmlTag input = writer.startTag("input"); input.addAttribute("type", "text"); input.addAttribute("id", ci.getId()); input.addAttribute("class", ci.getCssClass()); input.addAttribute("style", ci.getCssStyle()); if (ci.getDisabled()==false) { // Name of the field input.addAttribute("name", ci.getName()); // Get Max Length int maxLength = getMaxInputLength(ci.getColumn()); if (maxLength>0) { input.addAttribute("maxlength", maxLength); input.addAttribute("size", String.valueOf(Math.min(maxLength, ci.getHSize()))); } } else { // Disabled text control input.addAttribute("disabled"); // Get Max Length int maxLength = getMaxInputLength(ci.getColumn()); if (maxLength>0) { input.addAttribute("size", String.valueOf(Math.min(maxLength, ci.getHSize()))); } } // Value input.addAttribute("value", formatValue(ci, ci.getDisabled())); // Event Attributes input.addAttribute("onclick", ci.getOnclick()); input.addAttribute("onchange", ci.getOnchange()); input.addAttribute("onfocus", ci.getOnfocus()); input.addAttribute("onblur", ci.getOnblur()); input.endTag(); // Add Unit if (ci.getDisabled()==false) { String unit = getUnitString(ci); if (unit != null) { writer.print(" "); writer.print(unit); } } } */ // ------- Input Helpers ------- protected int getMaxInputLength(Column col) { // cast to DBTableColumn DataType type = col.getDataType(); if (type==DataType.CHAR || type==DataType.TEXT) return (int)Math.round(col.getSize()); if (type==DataType.AUTOINC || type==DataType.INTEGER) return 10; if (type==DataType.FLOAT) return 18; if (type==DataType.DECIMAL) { // check precision and scale double size = col.getSize(); int prec = (int)Math.round(size); if (prec == 0) return 0; int len = prec; // scale int scale =((int)(size*10)-(prec*10)); if (scale>0) len++; // Dezimaltrenner // thousand 
separator ? Object groupSep = col.getAttribute(InputControl.NUMBER_GROUPSEP_ATTRIBUTE); if (groupSep!=null && ObjectUtils.getBoolean(groupSep)) len += ((prec-scale-1)/3); // sign? Object minVal = col.getAttribute(DBColumn.DBCOLATTR_MINVALUE); if (minVal==null || ObjectUtils.getInteger(minVal)<0) len++; // Vorzeichen // fertig return len; } if (type==DataType.BOOL) return 1; if (type==DataType.DATE) return 10; if (type==DataType.DATETIME) return 16; if (type==DataType.CLOB) return 0; // unlimited (use 0x7FFFFFFF instead?) // undefined! log.info("No max-length available for data type {}.", type); return 0; } protected DataType getValueType(Object value, DataType desiredType) { // Detect Data Type from Value if (value instanceof String) return DataType.TEXT; if (value instanceof Number) { // Check desired type if (desiredType == DataType.AUTOINC || desiredType == DataType.INTEGER || desiredType == DataType.FLOAT || desiredType == DataType.DECIMAL) return desiredType; // Detect type if (value instanceof Integer || value instanceof Long || value instanceof Short) return DataType.INTEGER; if (value instanceof Float || value instanceof Double) return DataType.FLOAT; // default return DataType.DECIMAL; } if (value instanceof Date) { // Check desired type if (desiredType == DataType.DATETIME || desiredType == DataType.DATE) return desiredType; // Detect type if (value instanceof Timestamp) return DataType.DATETIME; // Just a date return DataType.DATE; } if (value instanceof Boolean) return DataType.BOOL; // Default Datatype return DataType.UNKNOWN; } protected NumberFormat getNumberFormat(DataType dataType, Locale locale, Column column) { if (column==null) return NumberFormat.getNumberInstance(locale); // Column is supplied String type = StringUtils.valueOf(column.getAttribute(InputControl.NUMBER_TYPE_ATTRIBUTE)); NumberFormat nf = null; if (type.equalsIgnoreCase("Integer")) nf = NumberFormat.getIntegerInstance(locale); else nf = NumberFormat.getNumberInstance(locale); // 
Groups Separator? Object groupSep = column.getAttribute(InputControl.NUMBER_GROUPSEP_ATTRIBUTE); if (groupSep!=null) nf.setGroupingUsed(ObjectUtils.getBoolean(groupSep)); // Fraction Digits? Object fractDigit = column.getAttribute(InputControl.NUMBER_FRACTION_DIGITS); if (fractDigit!=null) { int fractionDigits = ObjectUtils.getInteger(fractDigit); nf.setMaximumFractionDigits(fractionDigits); nf.setMinimumFractionDigits(fractionDigits); } // Number format return nf; } protected DateFormat getDateFormat(DataType dataType, ValueInfo vi, Column column) { int type = DateFormat.MEDIUM; // Is unit supplied as a format option String format = getFormatString(vi, DATE_FORMAT, DATE_FORMAT_ATTRIBUTE); if (format!=null) { // format has been provided if (StringUtils.compareEqual(format, "short", true)) type=DateFormat.SHORT; else if (StringUtils.compareEqual(format, "long", true)) type=DateFormat.LONG; } // return date formatter DateFormat df; if (dataType==DataType.DATE) df = DateFormat.getDateInstance(type, vi.getLocale()); else df = DateFormat.getDateTimeInstance(type, type, vi.getLocale()); return df; } private String getUnitString(ValueInfo vi) { // Is unit supplied as a format option String format = getFormatString(vi, FORMAT_UNIT, FORMAT_UNIT_ATTRIBUTE); if (format!=null) return format; // Is it a currency column Column column = vi.getColumn(); if (column!=null && column.getDataType()==DataType.DECIMAL) { String numberType = StringUtils.toString(column.getAttribute(InputControl.NUMBER_TYPE_ATTRIBUTE)); if (numberType!=null) { if (numberType.equalsIgnoreCase("Currency")) { String currencyCode = StringUtils.toString(column.getAttribute(InputControl.CURRENCY_CODE_ATTRIBUTE)); if (currencyCode!=null) { // nf = NumberFormat.getCurrencyInstance(locale); Currency currency = Currency.getInstance(currencyCode); return (currency!=null) ? 
currency.getSymbol() : null; } } else if (numberType.equalsIgnoreCase("Percent")) { return "%"; } } } // No Unit supplied return null; } // ------- value parsing ------- protected Object parseNumber(String s, NumberFormat nf) { // Try to convert for (int i=0; i<s.length(); i++) { if (s.charAt(i)>='A') throw new NumberFormatException("Not a number: "+s); } // Parse String try { return nf.parseObject(s); } catch(ParseException pe) { throw new NumberFormatException("Not a number: "+s+" Exception: "+pe.toString()); } } protected Object parseDate(String s, DateFormat df) { // Try to convert try { // Parse Date df.setLenient(true); return df.parseObject(s); } catch(ParseException pe) { throw new RuntimeException("Invalid date format: "+s, pe); } } }
empire-db-jsf2/src/main/java/org/apache/empire/jsf2/controls/TextInputControl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.empire.jsf2.controls; import java.io.IOException; import java.sql.Timestamp; import java.text.DateFormat; import java.text.NumberFormat; import java.text.ParseException; import java.util.Currency; import java.util.Date; import java.util.List; import java.util.Locale; import javax.faces.component.UIComponent; import javax.faces.component.html.HtmlInputText; import javax.faces.context.FacesContext; import javax.faces.context.ResponseWriter; import org.apache.empire.commons.ObjectUtils; import org.apache.empire.commons.Options; import org.apache.empire.commons.StringUtils; import org.apache.empire.data.Column; import org.apache.empire.data.DataType; import org.apache.empire.db.DBColumn; import org.apache.empire.exceptions.InternalException; import org.apache.empire.exceptions.UnexpectedReturnValueException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TextInputControl extends InputControl { private static final Logger log = LoggerFactory.getLogger(TextInputControl.class); public static final String NAME = "text"; public static final String FORMAT_UNIT = "unit:"; public static final String FORMAT_UNIT_ATTRIBUTE = "format:unit"; public static final String 
DATE_FORMAT = "date-format:"; public static final String DATE_FORMAT_ATTRIBUTE = "format:date"; private Class<? extends javax.faces.component.html.HtmlInputText> inputComponentClass; public TextInputControl(String name, Class<? extends HtmlInputText> inputComponentClass) { super(name); this.inputComponentClass = inputComponentClass; } public TextInputControl(String name) { this(name, javax.faces.component.html.HtmlInputText.class); } public TextInputControl() { this(NAME, javax.faces.component.html.HtmlInputText.class); } @Override protected void createInputComponents(UIComponent parent, InputInfo ii, FacesContext context, List<UIComponent> compList) { HtmlInputText input; if (compList.size()==0) { try { input = inputComponentClass.newInstance(); } catch (InstantiationException e1) { throw new InternalException(e1); } catch (IllegalAccessException e2) { throw new InternalException(e2); } // once copyAttributes(parent, ii, input); // language input.setLang(ii.getLocale().getLanguage()); // maxlength int maxLength = getMaxInputLength(ii.getColumn()); if (maxLength>0) input.setMaxlength(maxLength); // add compList.add(input); } else { // check type UIComponent comp = compList.get(0); if (!(comp instanceof HtmlInputText)) throw new UnexpectedReturnValueException(comp.getClass().getName(), "compList.get"); // cast input = (HtmlInputText)comp; } // disabled Object dis = ii.getAttributeEx("disabled"); if (dis!=null) input.setDisabled(ObjectUtils.getBoolean(dis)); // field-readOnly if (ObjectUtils.getBoolean(dis)==false) input.setReadonly(ii.isFieldReadOnly()); // style addRemoveDisabledStyle(input, (input.isDisabled() || input.isReadonly())); addRemoveInvalidStyle(input, ii.hasError()); // set value setInputValue(input, ii); } // ------- parsing ------- @Override protected Object parseInputValue(String value, InputInfo ii) { // Trim if (hasFormatOption(ii, "notrim")==false) value = value.trim(); // Check Data Type Column column = ii.getColumn(); DataType type = 
column.getDataType(); if (type.isText()) return value; // Check other types if (type==DataType.INTEGER) { NumberFormat nf = NumberFormat.getIntegerInstance(ii.getLocale()); return parseNumber(value, nf); } if (type==DataType.DECIMAL || type==DataType.FLOAT) { NumberFormat nf = NumberFormat.getNumberInstance(ii.getLocale()); return parseNumber(value, nf); } if (type==DataType.DATE || type==DataType.DATETIME) { return parseDate(value, getDateFormat(column.getDataType(), ii, column)); } if (type==DataType.BOOL) { return ObjectUtils.getBoolean(value); } if (type==DataType.AUTOINC) { // autoinc log.error("Autoinc-value cannot be changed."); return null; } // Default return value; } // ------- validation ------- /* @Override protected Object validate(Object o, Locale locale, Column column, String s) { if (o instanceof Number) { Object min = column.getAttribute(InputControl.MINVALUE_ATTRIBUTE); Object max = column.getAttribute(InputControl.MAXVALUE_ATTRIBUTE); if (min!=null && max!=null) { Number n = (Number)o; if (n.intValue()<ObjectUtils.getInteger(min) || n.intValue()>ObjectUtils.getInteger(max)) { // Out of Range return error(WebErrors.InputValueOutOfRange, new String[] { min.toString(), max.toString() }, s); } } } return o; } */ // ------- formatting ------- @Override protected String formatValue(Object value, ValueInfo vi) { // Lookup and Print value Options options = vi.getOptions(); if (options != null && !options.isEmpty()) { // Check for Options String text = options.get(value); if (text != null) return vi.getText(text); // Error log.error("The element '" + String.valueOf(value) + "' is not part of the supplied option list."); } // Check Value if (value == null) { // Try to use default value Object nullValue = getFormatOption(vi, FORMAT_NULL, FORMAT_NULL_ATTRIBUTE); if (nullValue!=null) return formatValue(nullValue, vi); // Empty String return ""; } // Format Value Column column = vi.getColumn(); DataType dataType = getValueType(value, (column != null) ? 
column.getDataType() : DataType.UNKNOWN); if (dataType == DataType.TEXT || dataType == DataType.UNKNOWN) { // String String s = String.valueOf(value); if (hasFormatOption(vi, "noencode")) return s; // Encoded text return escapeHTML(s); } if (dataType == DataType.INTEGER || dataType == DataType.AUTOINC) { // Integer NumberFormat nf = NumberFormat.getIntegerInstance(vi.getLocale()); nf.setGroupingUsed(false); return nf.format(value); } if (dataType == DataType.DECIMAL || dataType == DataType.FLOAT) { // Dezimal oder Double NumberFormat nf = getNumberFormat(dataType, vi.getLocale(), column); return nf.format(value); } if (dataType == DataType.DATE || dataType == DataType.DATETIME) { // Date or DateTime if (dataType== DataType.DATETIME && hasFormatOption(vi, "notime")) dataType = DataType.DATE; // Now format the date according to the user's locale DateFormat df = getDateFormat(dataType, vi, column); return df.format(value); } /* * if (dataType == DBDataType.BOOL) { * } */ // Convert to String return escapeHTML(String.valueOf(value)); } /* protected String formatValue(ValueInfo vi, boolean appendUnit) { String text = super.formatValue(vi); if (appendUnit && text!=null && text.length()>0) { String unit = getUnitString(vi); if (unit != null) { // Append unit text += " " + unit; } } return text; } */ @Override protected Object formatInputValue(Object value, InputInfo ii) { if (value == null) return ""; // Check options Options options = ii.getOptions(); if (options != null && !options.isEmpty()) return value; // Format return formatValue(value, ii); } // ------- render ------- @Override public void renderValue(ValueInfo vi, ResponseWriter writer) throws IOException { String text = formatValue(vi); if (StringUtils.isEmpty(text)) { // nothing writer.append("&nbsp;"); return; } // append text writer.append(text); // unit? 
String unit = getUnitString(vi); if (StringUtils.isNotEmpty(unit)) { // append unit writer.append(" "); writer.append(unit); } } /* @Override public void renderInput(Response writer, ControlInfo ci) { HtmlTag input = writer.startTag("input"); input.addAttribute("type", "text"); input.addAttribute("id", ci.getId()); input.addAttribute("class", ci.getCssClass()); input.addAttribute("style", ci.getCssStyle()); if (ci.getDisabled()==false) { // Name of the field input.addAttribute("name", ci.getName()); // Get Max Length int maxLength = getMaxInputLength(ci.getColumn()); if (maxLength>0) { input.addAttribute("maxlength", maxLength); input.addAttribute("size", String.valueOf(Math.min(maxLength, ci.getHSize()))); } } else { // Disabled text control input.addAttribute("disabled"); // Get Max Length int maxLength = getMaxInputLength(ci.getColumn()); if (maxLength>0) { input.addAttribute("size", String.valueOf(Math.min(maxLength, ci.getHSize()))); } } // Value input.addAttribute("value", formatValue(ci, ci.getDisabled())); // Event Attributes input.addAttribute("onclick", ci.getOnclick()); input.addAttribute("onchange", ci.getOnchange()); input.addAttribute("onfocus", ci.getOnfocus()); input.addAttribute("onblur", ci.getOnblur()); input.endTag(); // Add Unit if (ci.getDisabled()==false) { String unit = getUnitString(ci); if (unit != null) { writer.print(" "); writer.print(unit); } } } */ // ------- Input Helpers ------- protected int getMaxInputLength(Column col) { // cast to DBTableColumn DataType type = col.getDataType(); if (type==DataType.CHAR || type==DataType.TEXT) return (int)Math.round(col.getSize()); if (type==DataType.AUTOINC || type==DataType.INTEGER) return 10; if (type==DataType.FLOAT) return 18; if (type==DataType.DECIMAL) { // check precision and scale double size = col.getSize(); int prec = (int)Math.round(size); if (prec == 0) return 0; int len = prec; // scale int scale =((int)(size*10)-(prec*10)); if (scale>0) len++; // Dezimaltrenner // thousand 
separator ? Object groupSep = col.getAttribute(InputControl.NUMBER_GROUPSEP_ATTRIBUTE); if (groupSep!=null && ObjectUtils.getBoolean(groupSep)) len += (prec/3); // sign? Object minVal = col.getAttribute(DBColumn.DBCOLATTR_MINVALUE); if (minVal==null || ObjectUtils.getInteger(minVal)<0) len++; // Vorzeichen // fertig return len; } if (type==DataType.BOOL) return 1; if (type==DataType.DATE) return 10; if (type==DataType.DATETIME) return 16; if (type==DataType.CLOB) return 0; // unlimited (use 0x7FFFFFFF instead?) // undefined! log.info("No max-length available for data type {}.", type); return 0; } protected DataType getValueType(Object value, DataType desiredType) { // Detect Data Type from Value if (value instanceof String) return DataType.TEXT; if (value instanceof Number) { // Check desired type if (desiredType == DataType.AUTOINC || desiredType == DataType.INTEGER || desiredType == DataType.FLOAT || desiredType == DataType.DECIMAL) return desiredType; // Detect type if (value instanceof Integer || value instanceof Long || value instanceof Short) return DataType.INTEGER; if (value instanceof Float || value instanceof Double) return DataType.FLOAT; // default return DataType.DECIMAL; } if (value instanceof Date) { // Check desired type if (desiredType == DataType.DATETIME || desiredType == DataType.DATE) return desiredType; // Detect type if (value instanceof Timestamp) return DataType.DATETIME; // Just a date return DataType.DATE; } if (value instanceof Boolean) return DataType.BOOL; // Default Datatype return DataType.UNKNOWN; } protected NumberFormat getNumberFormat(DataType dataType, Locale locale, Column column) { if (column==null) return NumberFormat.getNumberInstance(locale); // Column is supplied String type = StringUtils.valueOf(column.getAttribute(InputControl.NUMBER_TYPE_ATTRIBUTE)); NumberFormat nf = null; if (type.equalsIgnoreCase("Integer")) nf = NumberFormat.getIntegerInstance(locale); else nf = NumberFormat.getNumberInstance(locale); // Groups 
Separator? Object groupSep = column.getAttribute(InputControl.NUMBER_GROUPSEP_ATTRIBUTE); if (groupSep!=null) nf.setGroupingUsed(ObjectUtils.getBoolean(groupSep)); // Fraction Digits? Object fractDigit = column.getAttribute(InputControl.NUMBER_FRACTION_DIGITS); if (fractDigit!=null) { int fractionDigits = ObjectUtils.getInteger(fractDigit); nf.setMaximumFractionDigits(fractionDigits); nf.setMinimumFractionDigits(fractionDigits); } // Number format return nf; } protected DateFormat getDateFormat(DataType dataType, ValueInfo vi, Column column) { int type = DateFormat.MEDIUM; // Is unit supplied as a format option String format = getFormatString(vi, DATE_FORMAT, DATE_FORMAT_ATTRIBUTE); if (format!=null) { // format has been provided if (StringUtils.compareEqual(format, "short", true)) type=DateFormat.SHORT; else if (StringUtils.compareEqual(format, "long", true)) type=DateFormat.LONG; } // return date formatter DateFormat df; if (dataType==DataType.DATE) df = DateFormat.getDateInstance(type, vi.getLocale()); else df = DateFormat.getDateTimeInstance(type, type, vi.getLocale()); return df; } private String getUnitString(ValueInfo vi) { // Is unit supplied as a format option String format = getFormatString(vi, FORMAT_UNIT, FORMAT_UNIT_ATTRIBUTE); if (format!=null) return format; // Is it a currency column Column column = vi.getColumn(); if (column!=null && column.getDataType()==DataType.DECIMAL) { String numberType = StringUtils.toString(column.getAttribute(InputControl.NUMBER_TYPE_ATTRIBUTE)); if (numberType!=null) { if (numberType.equalsIgnoreCase("Currency")) { String currencyCode = StringUtils.toString(column.getAttribute(InputControl.CURRENCY_CODE_ATTRIBUTE)); if (currencyCode!=null) { // nf = NumberFormat.getCurrencyInstance(locale); Currency currency = Currency.getInstance(currencyCode); return (currency!=null) ? 
currency.getSymbol() : null; } } else if (numberType.equalsIgnoreCase("Percent")) { return "%"; } } } // No Unit supplied return null; } // ------- value parsing ------- protected Object parseNumber(String s, NumberFormat nf) { // Try to convert for (int i=0; i<s.length(); i++) { if (s.charAt(i)>='A') throw new NumberFormatException("Not a number: "+s); } // Parse String try { return nf.parseObject(s); } catch(ParseException pe) { throw new NumberFormatException("Not a number: "+s+" Exception: "+pe.toString()); } } protected Object parseDate(String s, DateFormat df) { // Try to convert try { // Parse Date df.setLenient(true); return df.parseObject(s); } catch(ParseException pe) { throw new RuntimeException("Invalid date format: "+s, pe); } } }
git-svn-id: https://svn.apache.org/repos/asf/empire-db/trunk@1387151 13f79535-47bb-0310-9956-ffa450edef68
empire-db-jsf2/src/main/java/org/apache/empire/jsf2/controls/TextInputControl.java
<ide><path>mpire-db-jsf2/src/main/java/org/apache/empire/jsf2/controls/TextInputControl.java <ide> // thousand separator ? <ide> Object groupSep = col.getAttribute(InputControl.NUMBER_GROUPSEP_ATTRIBUTE); <ide> if (groupSep!=null && ObjectUtils.getBoolean(groupSep)) <del> len += (prec/3); <add> len += ((prec-scale-1)/3); <ide> // sign? <ide> Object minVal = col.getAttribute(DBColumn.DBCOLATTR_MINVALUE); <ide> if (minVal==null || ObjectUtils.getInteger(minVal)<0)
Java
epl-1.0
7b549c09390dc225ca3617c2a543f693a15e708c
0
ForgeEssentials/ForgeEssentialsMain,Techjar/ForgeEssentials,liachmodded/ForgeEssentials,aschmois/ForgeEssentialsMain,planetguy32/ForgeEssentials,CityOfLearning/ForgeEssentials
package com.ForgeEssentials.commands; import java.util.HashMap; import java.util.List; import net.minecraft.command.ICommandSender; import net.minecraft.command.PlayerSelector; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.ChunkCoordinates; import com.ForgeEssentials.core.PlayerInfo; import com.ForgeEssentials.core.commands.ForgeEssentialsCommandBase; import com.ForgeEssentials.util.DataStorage; import com.ForgeEssentials.util.FunctionHelper; import com.ForgeEssentials.util.Localization; import com.ForgeEssentials.util.OutputHandler; import com.ForgeEssentials.util.TeleportCenter; import com.ForgeEssentials.util.AreaSelector.Point; import com.ForgeEssentials.util.AreaSelector.WarpPoint; import cpw.mods.fml.common.FMLCommonHandler; public class CommandSpawn extends ForgeEssentialsCommandBase { /** Spawn point for each dimension */ public static HashMap<Integer, Point> spawnPoints = new HashMap<Integer, Point>(); @Override public String getCommandName() { return "spawn"; } @Override public void processCommandPlayer(EntityPlayer sender, String[] args) { if (args.length >= 1) { EntityPlayer player = FunctionHelper.getPlayerFromUsername(args[0]); if (player != null) { // NBTTagCompound spawn = DataStorage.getData("spawn"); PlayerInfo.getPlayerInfo(player.username).back = new WarpPoint(player); WarpPoint spawn; ChunkCoordinates point = FMLCommonHandler.instance().getMinecraftServerInstance().worldServers[0].provider.getSpawnPoint(); spawn = new WarpPoint(0, point.posX, point.posY, point.posZ, sender.rotationPitch, sender.rotationYaw); TeleportCenter.addToTpQue(spawn, player); player.sendChatToPlayer(Localization.get(Localization.SPAWNED)); } else { OutputHandler.chatError(sender, Localization.format(Localization.ERROR_NOPLAYER, args[0])); } } else { // NBTTagCompound data = DataStorage.getData("spawn"); WarpPoint spawn; // if(!(data == null)) // { // 
spawn = new WarpPoint(data.getInteger("dim"), data.getDouble("x"), data.getDouble("y"), // data.getDouble("z"), data.getFloat("pitch"), data.getFloat("yaw")); // } // else // { ChunkCoordinates point = FMLCommonHandler.instance().getMinecraftServerInstance().worldServers[0].provider.getSpawnPoint(); spawn = new WarpPoint(0, point.posX, point.posY, point.posZ, sender.rotationPitch, sender.rotationYaw); // } // if (spawn != null) // { PlayerInfo.getPlayerInfo(sender.username).back = new WarpPoint(sender); TeleportCenter.addToTpQue(spawn, sender); // ((EntityPlayerMP) sender).playerNetServerHandler // .setPlayerLocation(spawn.posX, spawn.posY, spawn.posZ, sender.rotationYaw, sender.rotationPitch); sender.sendChatToPlayer(Localization.get(Localization.SPAWNED)); // } } } @Override public void processCommandConsole(ICommandSender sender, String[] args) { if (args.length >= 1) { EntityPlayer player = FMLCommonHandler.instance().getSidedDelegate().getServer().getConfigurationManager().getPlayerForUsername(args[0]); if(PlayerSelector.hasArguments(args[0])) { player = PlayerSelector.matchOnePlayer(sender, args[0]); } if (player != null) { PlayerInfo.getPlayerInfo(player.username).back = new WarpPoint(player); ChunkCoordinates spawnpoint = FMLCommonHandler.instance().getMinecraftServerInstance().worldServers[0].provider.getSpawnPoint(); WarpPoint spawn = new WarpPoint(player.dimension, spawnpoint.posX, spawnpoint.posY, spawnpoint.posZ, player.rotationPitch, player.rotationYaw); TeleportCenter.addToTpQue(spawn, player); player.sendChatToPlayer(Localization.get(Localization.SPAWNED)); } else { sender.sendChatToPlayer(Localization.format(Localization.ERROR_NOPLAYER, args[0])); } } } @Override public boolean canConsoleUseCommand() { return true; } @Override public String getCommandPerm() { return "ForgeEssentials.BasicCommands." 
+ getCommandName(); } @Override public List addTabCompletionOptions(ICommandSender sender, String[] args) { if (args.length == 1) { return getListOfStringsMatchingLastWord(args, FMLCommonHandler.instance().getMinecraftServerInstance().getAllUsernames()); } else { return null; } } }
src/FE_SRC_COMMON/com/ForgeEssentials/commands/CommandSpawn.java
package com.ForgeEssentials.commands; import java.util.HashMap; import java.util.List; import net.minecraft.command.ICommandSender; import net.minecraft.command.PlayerSelector; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.ChunkCoordinates; import com.ForgeEssentials.core.PlayerInfo; import com.ForgeEssentials.core.commands.ForgeEssentialsCommandBase; import com.ForgeEssentials.util.DataStorage; import com.ForgeEssentials.util.FunctionHelper; import com.ForgeEssentials.util.Localization; import com.ForgeEssentials.util.OutputHandler; import com.ForgeEssentials.util.TeleportCenter; import com.ForgeEssentials.util.AreaSelector.Point; import com.ForgeEssentials.util.AreaSelector.WarpPoint; import cpw.mods.fml.common.FMLCommonHandler; public class CommandSpawn extends ForgeEssentialsCommandBase { /** Spawn point for each dimension */ public static HashMap<Integer, Point> spawnPoints = new HashMap<Integer, Point>(); @Override public String getCommandName() { return "spawn"; } @Override public void processCommandPlayer(EntityPlayer sender, String[] args) { if (args.length >= 1) { EntityPlayer player = FunctionHelper.getPlayerFromUsername(args[0]); if (player != null) { // NBTTagCompound spawn = DataStorage.getData("spawn"); PlayerInfo.getPlayerInfo(player.username).back = new WarpPoint(player); WarpPoint spawn; ChunkCoordinates point = FMLCommonHandler.instance().getMinecraftServerInstance().worldServers[0].provider.getSpawnPoint(); spawn = new WarpPoint(sender.dimension, point.posX, point.posY, point.posZ, sender.rotationPitch, sender.rotationYaw); TeleportCenter.addToTpQue(spawn, player); player.sendChatToPlayer(Localization.get(Localization.SPAWNED)); } else { OutputHandler.chatError(sender, Localization.format(Localization.ERROR_NOPLAYER, args[0])); } } else { // NBTTagCompound data = DataStorage.getData("spawn"); WarpPoint spawn; // if(!(data == 
null)) // { // spawn = new WarpPoint(data.getInteger("dim"), data.getDouble("x"), data.getDouble("y"), // data.getDouble("z"), data.getFloat("pitch"), data.getFloat("yaw")); // } // else // { ChunkCoordinates point = FMLCommonHandler.instance().getMinecraftServerInstance().worldServers[0].provider.getSpawnPoint(); spawn = new WarpPoint(sender.dimension, point.posX, point.posY, point.posZ, sender.rotationPitch, sender.rotationYaw); // } // if (spawn != null) // { PlayerInfo.getPlayerInfo(sender.username).back = new WarpPoint(sender); TeleportCenter.addToTpQue(spawn, sender); // ((EntityPlayerMP) sender).playerNetServerHandler // .setPlayerLocation(spawn.posX, spawn.posY, spawn.posZ, sender.rotationYaw, sender.rotationPitch); sender.sendChatToPlayer(Localization.get(Localization.SPAWNED)); // } } } @Override public void processCommandConsole(ICommandSender sender, String[] args) { if (args.length >= 1) { EntityPlayer player = FMLCommonHandler.instance().getSidedDelegate().getServer().getConfigurationManager().getPlayerForUsername(args[0]); if(PlayerSelector.hasArguments(args[0])) { player = PlayerSelector.matchOnePlayer(sender, args[0]); } if (player != null) { PlayerInfo.getPlayerInfo(player.username).back = new WarpPoint(player); ChunkCoordinates spawnpoint = FMLCommonHandler.instance().getMinecraftServerInstance().worldServers[0].provider.getSpawnPoint(); WarpPoint spawn = new WarpPoint(player.dimension, spawnpoint.posX, spawnpoint.posY, spawnpoint.posZ, player.rotationPitch, player.rotationYaw); TeleportCenter.addToTpQue(spawn, player); player.sendChatToPlayer(Localization.get(Localization.SPAWNED)); } else { sender.sendChatToPlayer(Localization.format(Localization.ERROR_NOPLAYER, args[0])); } } } @Override public boolean canConsoleUseCommand() { return true; } @Override public String getCommandPerm() { return "ForgeEssentials.BasicCommands." 
+ getCommandName(); } @Override public List addTabCompletionOptions(ICommandSender sender, String[] args) { if (args.length == 1) { return getListOfStringsMatchingLastWord(args, FMLCommonHandler.instance().getMinecraftServerInstance().getAllUsernames()); } else { return null; } } }
Fixed /spawn taking you to spawnpoint in current dimension.
src/FE_SRC_COMMON/com/ForgeEssentials/commands/CommandSpawn.java
Fixed /spawn taking you to spawnpoint in current dimension.
<ide><path>rc/FE_SRC_COMMON/com/ForgeEssentials/commands/CommandSpawn.java <ide> <ide> WarpPoint spawn; <ide> ChunkCoordinates point = FMLCommonHandler.instance().getMinecraftServerInstance().worldServers[0].provider.getSpawnPoint(); <del> spawn = new WarpPoint(sender.dimension, point.posX, point.posY, point.posZ, sender.rotationPitch, sender.rotationYaw); <add> spawn = new WarpPoint(0, point.posX, point.posY, point.posZ, sender.rotationPitch, sender.rotationYaw); <ide> TeleportCenter.addToTpQue(spawn, player); <ide> player.sendChatToPlayer(Localization.get(Localization.SPAWNED)); <ide> } <ide> // else <ide> // { <ide> ChunkCoordinates point = FMLCommonHandler.instance().getMinecraftServerInstance().worldServers[0].provider.getSpawnPoint(); <del> spawn = new WarpPoint(sender.dimension, point.posX, point.posY, point.posZ, sender.rotationPitch, sender.rotationYaw); <add> spawn = new WarpPoint(0, point.posX, point.posY, point.posZ, sender.rotationPitch, sender.rotationYaw); <ide> // } <ide> // if (spawn != null) <ide> // {
Java
mit
error: pathspec 'AzimuthalEquidistantProjection.java' did not match any file(s) known to git
a632677073b916a80548a8e0910bbc88e816896c
1
acnelson12/PP4
import static java.lang.Math.sin; import static java.lang.Math.cos; import static java.lang.Math.acos; public class AzimuthalEquidistantProjection { /* Instance Variables */ private final double α0; private final double δ0; /*== Constructors ==*/ public AzimuthalEquidistantProjection( final RightAscension CENTER_RA, final Declination CENTER_DEC ) { α0 = CENTER_RA.toRadians(); δ0 = CENTER_DEC.toRadians(); } /*== Accessors ==*/ public double[] getXY( final RightAscension RA, final Declination DEC ) { /* Local Constants */ final double α = RA.toRadians(); final double δ = DEC.toRadians(); final double K; final double COS_C; final double C; /* Local Variables */ double[] xy = new double[2]; /* Calculate x and y Coordinates */ COS_C = sin(δ0)*sin(δ) + cos(δ0)*cos(δ)*cos(α - α0); C = acos(COS_C); K = C / sin(C); xy[0] = K * cos(δ)*sin(α - α0); xy[1] = K * ( cos(δ0)*sin(δ) - sin(δ0)*cos(δ)*cos(α - α0) ); return xy; /* * See here: * http://mathworld.wolfram.com/AzimuthalEquidistantProjection.html */ } }
AzimuthalEquidistantProjection.java
Create AzimuthalEquidistantProjection.java
AzimuthalEquidistantProjection.java
Create AzimuthalEquidistantProjection.java
<ide><path>zimuthalEquidistantProjection.java <add>import static java.lang.Math.sin; <add>import static java.lang.Math.cos; <add>import static java.lang.Math.acos; <add> <add>public class AzimuthalEquidistantProjection <add>{ <add> /* Instance Variables */ <add> private final double α0; <add> private final double δ0; <add> <add> /*== Constructors ==*/ <add> public AzimuthalEquidistantProjection( final RightAscension CENTER_RA, <add> final Declination CENTER_DEC ) <add> { <add> α0 = CENTER_RA.toRadians(); <add> δ0 = CENTER_DEC.toRadians(); <add> } <add> <add> /*== Accessors ==*/ <add> public double[] getXY( final RightAscension RA, final Declination DEC ) <add> { <add> /* Local Constants */ <add> final double α = RA.toRadians(); <add> final double δ = DEC.toRadians(); <add> final double K; <add> final double COS_C; <add> final double C; <add> <add> /* Local Variables */ <add> double[] xy = new double[2]; <add> <add> /* Calculate x and y Coordinates */ <add> COS_C = sin(δ0)*sin(δ) + cos(δ0)*cos(δ)*cos(α - α0); <add> C = acos(COS_C); <add> K = C / sin(C); <add> xy[0] = K * cos(δ)*sin(α - α0); <add> xy[1] = K * ( cos(δ0)*sin(δ) - sin(δ0)*cos(δ)*cos(α - α0) ); <add> <add> return xy; <add> <add> /* <add> * See here: <add> * http://mathworld.wolfram.com/AzimuthalEquidistantProjection.html <add> */ <add> } <add>}
Java
lgpl-2.1
error: pathspec 'tests/org/biojavax/SimpleRankedDocRefTest.java' did not match any file(s) known to git
939711e4ad92b1b91547225e44289d18a2359cbf
1
sbliven/biojava,sbliven/biojava,sbliven/biojava
/* * SimpleRankedDocRefTest.java * JUnit based test * * Created on 12 November 2005, 15:43 */ package org.biojavax; import java.util.Collections; import junit.framework.*; /** * * @author Mark Schreiber */ public class SimpleRankedDocRefTest extends TestCase { DocRef dr; SimpleRankedDocRef ref; SimpleRankedDocRef ref2; int rank = 1; Integer start; Integer end; public SimpleRankedDocRefTest(String testName) { super(testName); start = new Integer(1); end = new Integer(25); dr = new SimpleDocRef(Collections.singletonList( new SimpleDocRefAuthor("Hubert Hubertson", false, false)), "Journal of Voodoo Virology"); } protected void setUp() throws Exception { ref = new SimpleRankedDocRef(dr, start, end, rank); } protected void tearDown() throws Exception { ref = null; } public static Test suite() { TestSuite suite = new TestSuite(SimpleRankedDocRefTest.class); return suite; } /** * Test of getRank method, of class org.biojavax.SimpleRankedDocRef. */ public void testGetRank() { System.out.println("testGetRank"); assertEquals(rank, ref.getRank()); } /** * Test of getDocumentReference method, of class org.biojavax.SimpleRankedDocRef. */ public void testGetDocumentReference() { System.out.println("testGetDocumentReference"); assertEquals(dr, ref.getDocumentReference()); } /** * Test of getStart method, of class org.biojavax.SimpleRankedDocRef. */ public void testGetStart() { System.out.println("testGetStart"); assertEquals(start, ref.getStart()); } /** * Test of getEnd method, of class org.biojavax.SimpleRankedDocRef. */ public void testGetEnd() { System.out.println("testGetEnd"); assertEquals(end, ref.getEnd()); } /** * Test of equals method, of class org.biojavax.SimpleRankedDocRef. */ public void testEquals() { System.out.println("testEquals"); assertTrue(ref.equals(ref)); assertFalse(ref.equals(new Object())); assertFalse(ref.equals(null)); //Two ranked document references are equal if they have the same rank //and refer to the same document reference. 
ref2 = new SimpleRankedDocRef(dr, start, end, 1); //equal assertTrue(ref.equals(ref2)); assertTrue(ref2.equals(ref)); ref2 = new SimpleRankedDocRef(dr, new Integer(30), new Integer(60), 1); //equal assertTrue(ref.equals(ref2)); assertTrue(ref2.equals(ref)); ref2 = new SimpleRankedDocRef(dr, start, end, 100); //not equal assertFalse(ref.equals(ref2)); assertFalse(ref2.equals(ref)); ref2 = new SimpleRankedDocRef(new SimpleDocRef( Collections.singletonList(new SimpleDocRefAuthor("Rev. Falliwell", false, false)), "Kansas Journal of Creationism"), start, end, 1); //not equal assertFalse(ref.equals(ref2)); assertFalse(ref2.equals(ref)); } /** * Test of compareTo method, of class org.biojavax.SimpleRankedDocRef. */ public void testCompareTo() { System.out.println("testCompareTo"); assertTrue(ref.compareTo(ref) == 0); //Two ranked document references are equal if they have the same rank //and refer to the same document reference. ref2 = new SimpleRankedDocRef(dr, start, end, 1); //equal assertTrue(ref.compareTo(ref2) == 0); assertTrue(ref2.compareTo(ref) == 0); ref2 = new SimpleRankedDocRef(dr, new Integer(30), new Integer(60), 1); //equal assertTrue(ref.compareTo(ref2) == 0); assertTrue(ref2.compareTo(ref) == 0); ref2 = new SimpleRankedDocRef(dr, start, end, 100); //not equal assertTrue(ref.compareTo(ref2) < 0); assertTrue(ref2.compareTo(ref) > 0); ref2 = new SimpleRankedDocRef(new SimpleDocRef( Collections.singletonList(new SimpleDocRefAuthor("Rev. Falliwell", false, false)), "Kansas Journal of Creationism"), start, end, 1); //not equal assertTrue(ref.compareTo(ref2) < 0); assertTrue(ref2.compareTo(ref) > 0); } /** * Test of hashCode method, of class org.biojavax.SimpleRankedDocRef. 
*/ public void testHashCode() { System.out.println("testHashCode"); ref2 = new SimpleRankedDocRef(dr, start, end, 1); //equal assertTrue(ref.hashCode() == ref2.hashCode()); ref2 = new SimpleRankedDocRef(dr, new Integer(30), new Integer(60), 1); //equal assertTrue(ref.hashCode() == ref2.hashCode()); } /** * Test of toString method, of class org.biojavax.SimpleRankedDocRef. */ public void testToString() { System.out.println("testToString"); String expected = "(#"+rank+") "+dr; assertEquals(expected, ref.toString()); } }
tests/org/biojavax/SimpleRankedDocRefTest.java
initial commit git-svn-id: ed25c26de1c5325e8eb0deed0b990ab8af8a4def@3667 7c6358e6-4a41-0410-a743-a5b2a554c398
tests/org/biojavax/SimpleRankedDocRefTest.java
initial commit
<ide><path>ests/org/biojavax/SimpleRankedDocRefTest.java <add>/* <add> * SimpleRankedDocRefTest.java <add> * JUnit based test <add> * <add> * Created on 12 November 2005, 15:43 <add> */ <add> <add>package org.biojavax; <add> <add>import java.util.Collections; <add>import junit.framework.*; <add> <add>/** <add> * <add> * @author Mark Schreiber <add> */ <add>public class SimpleRankedDocRefTest extends TestCase { <add> DocRef dr; <add> SimpleRankedDocRef ref; <add> SimpleRankedDocRef ref2; <add> int rank = 1; <add> Integer start; <add> Integer end; <add> <add> public SimpleRankedDocRefTest(String testName) { <add> super(testName); <add> start = new Integer(1); <add> end = new Integer(25); <add> dr = new SimpleDocRef(Collections.singletonList( <add> new SimpleDocRefAuthor("Hubert Hubertson", false, false)), "Journal of Voodoo Virology"); <add> } <add> <add> protected void setUp() throws Exception { <add> ref = new SimpleRankedDocRef(dr, start, end, rank); <add> } <add> <add> protected void tearDown() throws Exception { <add> ref = null; <add> } <add> <add> public static Test suite() { <add> TestSuite suite = new TestSuite(SimpleRankedDocRefTest.class); <add> <add> return suite; <add> } <add> <add> /** <add> * Test of getRank method, of class org.biojavax.SimpleRankedDocRef. <add> */ <add> public void testGetRank() { <add> System.out.println("testGetRank"); <add> <add> assertEquals(rank, ref.getRank()); <add> } <add> <add> /** <add> * Test of getDocumentReference method, of class org.biojavax.SimpleRankedDocRef. <add> */ <add> public void testGetDocumentReference() { <add> System.out.println("testGetDocumentReference"); <add> <add> assertEquals(dr, ref.getDocumentReference()); <add> } <add> <add> /** <add> * Test of getStart method, of class org.biojavax.SimpleRankedDocRef. 
<add> */ <add> public void testGetStart() { <add> System.out.println("testGetStart"); <add> <add> assertEquals(start, ref.getStart()); <add> } <add> <add> /** <add> * Test of getEnd method, of class org.biojavax.SimpleRankedDocRef. <add> */ <add> public void testGetEnd() { <add> System.out.println("testGetEnd"); <add> <add> assertEquals(end, ref.getEnd()); <add> } <add> <add> /** <add> * Test of equals method, of class org.biojavax.SimpleRankedDocRef. <add> */ <add> public void testEquals() { <add> System.out.println("testEquals"); <add> <add> assertTrue(ref.equals(ref)); <add> assertFalse(ref.equals(new Object())); <add> assertFalse(ref.equals(null)); <add> //Two ranked document references are equal if they have the same rank <add> //and refer to the same document reference. <add> ref2 = new SimpleRankedDocRef(dr, start, end, 1); //equal <add> assertTrue(ref.equals(ref2)); <add> assertTrue(ref2.equals(ref)); <add> <add> ref2 = new SimpleRankedDocRef(dr, new Integer(30), new Integer(60), 1); //equal <add> assertTrue(ref.equals(ref2)); <add> assertTrue(ref2.equals(ref)); <add> <add> ref2 = new SimpleRankedDocRef(dr, start, end, 100); //not equal <add> assertFalse(ref.equals(ref2)); <add> assertFalse(ref2.equals(ref)); <add> <add> ref2 = new SimpleRankedDocRef(new SimpleDocRef( <add> Collections.singletonList(new SimpleDocRefAuthor("Rev. Falliwell", false, false)), <add> "Kansas Journal of Creationism"), start, end, 1); //not equal <add> assertFalse(ref.equals(ref2)); <add> assertFalse(ref2.equals(ref)); <add> } <add> <add> /** <add> * Test of compareTo method, of class org.biojavax.SimpleRankedDocRef. <add> */ <add> public void testCompareTo() { <add> System.out.println("testCompareTo"); <add> <add> assertTrue(ref.compareTo(ref) == 0); <add> <add> //Two ranked document references are equal if they have the same rank <add> //and refer to the same document reference. 
<add> ref2 = new SimpleRankedDocRef(dr, start, end, 1); //equal <add> assertTrue(ref.compareTo(ref2) == 0); <add> assertTrue(ref2.compareTo(ref) == 0); <add> <add> ref2 = new SimpleRankedDocRef(dr, new Integer(30), new Integer(60), 1); //equal <add> assertTrue(ref.compareTo(ref2) == 0); <add> assertTrue(ref2.compareTo(ref) == 0); <add> <add> ref2 = new SimpleRankedDocRef(dr, start, end, 100); //not equal <add> assertTrue(ref.compareTo(ref2) < 0); <add> assertTrue(ref2.compareTo(ref) > 0); <add> <add> ref2 = new SimpleRankedDocRef(new SimpleDocRef( <add> Collections.singletonList(new SimpleDocRefAuthor("Rev. Falliwell", false, false)), <add> "Kansas Journal of Creationism"), start, end, 1); //not equal <add> assertTrue(ref.compareTo(ref2) < 0); <add> assertTrue(ref2.compareTo(ref) > 0); <add> } <add> <add> /** <add> * Test of hashCode method, of class org.biojavax.SimpleRankedDocRef. <add> */ <add> public void testHashCode() { <add> System.out.println("testHashCode"); <add> <add> ref2 = new SimpleRankedDocRef(dr, start, end, 1); //equal <add> assertTrue(ref.hashCode() == ref2.hashCode()); <add> <add> ref2 = new SimpleRankedDocRef(dr, new Integer(30), new Integer(60), 1); //equal <add> assertTrue(ref.hashCode() == ref2.hashCode()); <add> } <add> <add> /** <add> * Test of toString method, of class org.biojavax.SimpleRankedDocRef. <add> */ <add> public void testToString() { <add> System.out.println("testToString"); <add> <add> String expected = "(#"+rank+") "+dr; <add> assertEquals(expected, ref.toString()); <add> } <add>}
Java
mit
624bbfd44444bb3b0df851a0769a087cb09e6d31
0
yeputons/spbau-java-course-torrent-term4,yeputons/spbau-java-course-torrent-term4
package net.yeputons.spbau.spring2016.torrent.client; import net.yeputons.spbau.spring2016.torrent.FileDescription; import net.yeputons.spbau.spring2016.torrent.StateHolder; import net.yeputons.spbau.spring2016.torrent.TorrentConnection; import net.yeputons.spbau.spring2016.torrent.protocol.FileEntry; import net.yeputons.spbau.spring2016.torrent.protocol.GetRequest; import net.yeputons.spbau.spring2016.torrent.protocol.SourcesRequest; import net.yeputons.spbau.spring2016.torrent.protocol.StatRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.RandomAccessFile; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.util.BitSet; import java.util.Collections; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; public class TorrentLeecher { private static final Logger LOG = LoggerFactory.getLogger(TorrentLeecher.class); private static final int RETRY_DELAY = 1000; private final TorrentConnection tracker; private final StateHolder<ClientState> stateHolder; private final FileDescription fileDescription; private final ScheduledExecutorService executorService; private final CountDownLatch finishedLatch = new CountDownLatch(1); public TorrentLeecher(TorrentConnection tracker, StateHolder<ClientState> stateHolder, FileDescription fileDescription, ScheduledExecutorService executorService) { this.tracker = tracker; this.stateHolder = stateHolder; this.fileDescription = fileDescription; this.executorService = executorService; } public void start() { LOG.info("Started downloading {}", fileDescription.getEntry()); this.executorService.submit(new LeechTask()); } public void join() throws InterruptedException { finishedLatch.await(); } private class LeechTask implements Runnable { private final FileEntry entry = fileDescription.getEntry(); private final int fileId = entry.getId(); private BitSet 
downloaded; LeechTask() { synchronized (stateHolder.getState()) { downloaded = (BitSet) fileDescription.getDownloaded().clone(); } } @Override public void run() { int partsCount = fileDescription.getPartsCount(); if (downloaded.cardinality() >= partsCount) { LOG.info("Downloading of {} is finished", entry); finishedLatch.countDown(); return; } LOG.debug("Downloaded: {}/{}", downloaded.cardinality(), partsCount); List<InetSocketAddress> sources = null; try { sources = tracker.makeRequest(new SourcesRequest(fileId)); } catch (IOException e) { LOG.error("Unable to request sources from tracker, will retry", e); executorService.schedule(this, RETRY_DELAY, TimeUnit.MILLISECONDS); return; } Collections.shuffle(sources); LOG.debug("Sources: {}", sources); boolean downloadedSomething = false; loopForSources: for (InetSocketAddress source : sources) { try (TorrentConnection peer = TorrentConnection.connect(source)) { List<Integer> partsAvailable = peer.makeRequest(new StatRequest(fileId)); LOG.debug("Peer {} has {} parts available", source, partsAvailable.size()); for (int partId : partsAvailable) { if (downloaded.get(partId)) { continue; } LOG.debug("Retrieving part {} from {}", partId, source); ByteBuffer data = peer.makeRequest( new GetRequest(fileId, partId, fileDescription.getPartSize(partId))); ClientState state = stateHolder.getState(); try { RandomAccessFile file = state.getFile(fileId); synchronized (file) { file.seek(fileDescription.getPartStart(partId)); file.write(data.array()); } synchronized (state) { downloaded.flip(partId); fileDescription.getDownloaded().flip(partId); try { stateHolder.save(); } catch (IOException e) { downloaded.flip(partId); fileDescription.getDownloaded().flip(partId); } } downloadedSomething = true; break loopForSources; } catch (IOException e) { LOG.error("Error while saving file, will retry", e); executorService.schedule(this, RETRY_DELAY, TimeUnit.MILLISECONDS); return; } } } catch (IOException e) { LOG.warn("Error while communicating 
with peer", e); } } if (!downloadedSomething) { LOG.debug("Sleeping until next iteration"); executorService.schedule(this, RETRY_DELAY, TimeUnit.MILLISECONDS); } else { LOG.debug("Starting next iteration right away"); executorService.submit(this); } } } }
src/main/java/net/yeputons/spbau/spring2016/torrent/client/TorrentLeecher.java
package net.yeputons.spbau.spring2016.torrent.client; import net.yeputons.spbau.spring2016.torrent.FileDescription; import net.yeputons.spbau.spring2016.torrent.StateHolder; import net.yeputons.spbau.spring2016.torrent.TorrentConnection; import net.yeputons.spbau.spring2016.torrent.protocol.FileEntry; import net.yeputons.spbau.spring2016.torrent.protocol.GetRequest; import net.yeputons.spbau.spring2016.torrent.protocol.SourcesRequest; import net.yeputons.spbau.spring2016.torrent.protocol.StatRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.RandomAccessFile; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.util.BitSet; import java.util.Collections; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; public class TorrentLeecher { private static final Logger LOG = LoggerFactory.getLogger(TorrentLeecher.class); private static final int RETRY_DELAY = 1000; private final TorrentConnection tracker; private final StateHolder<ClientState> stateHolder; private final FileDescription fileDescription; private final ScheduledExecutorService executorService; private final CountDownLatch finishedLatch = new CountDownLatch(1); public TorrentLeecher(TorrentConnection tracker, StateHolder<ClientState> stateHolder, FileDescription fileDescription, ScheduledExecutorService executorService) { this.tracker = tracker; this.stateHolder = stateHolder; this.fileDescription = fileDescription; this.executorService = executorService; } public void start() { LOG.info("Started downloading {}", fileDescription.getEntry()); this.executorService.submit(new LeechTask()); } public void join() throws InterruptedException { finishedLatch.await(); } private class LeechTask implements Runnable { private final FileEntry entry = fileDescription.getEntry(); private final int fileId = entry.getId(); private BitSet 
downloaded; LeechTask() { synchronized (stateHolder.getState()) { downloaded = (BitSet) fileDescription.getDownloaded().clone(); } } @Override public void run() { int partsCount = fileDescription.getPartsCount(); if (downloaded.cardinality() >= partsCount) { LOG.info("Downloading of {} is finished", entry); finishedLatch.countDown(); return; } LOG.debug("Downloaded: {}/{}", downloaded.cardinality(), partsCount); List<InetSocketAddress> sources = null; try { sources = tracker.makeRequest(new SourcesRequest(fileId)); } catch (IOException e) { LOG.error("Unable to request sources from tracker", e); return; } Collections.shuffle(sources); LOG.debug("Sources: {}", sources); boolean downloadedSomething = false; loopForSources: for (InetSocketAddress source : sources) { try (TorrentConnection peer = TorrentConnection.connect(source)) { List<Integer> partsAvailable = peer.makeRequest(new StatRequest(fileId)); LOG.debug("Peer {} has {} parts available", source, partsAvailable.size()); for (int partId : partsAvailable) { if (downloaded.get(partId)) { continue; } LOG.debug("Retrieving part {} from {}", partId, source); ByteBuffer data = peer.makeRequest( new GetRequest(fileId, partId, fileDescription.getPartSize(partId))); ClientState state = stateHolder.getState(); try { RandomAccessFile file = state.getFile(fileId); synchronized (file) { file.seek(fileDescription.getPartStart(partId)); file.write(data.array()); } synchronized (state) { downloaded.flip(partId); fileDescription.getDownloaded().flip(partId); try { stateHolder.save(); } catch (IOException e) { downloaded.flip(partId); fileDescription.getDownloaded().flip(partId); } } downloadedSomething = true; break loopForSources; } catch (IOException e) { LOG.error("Error while saving file", e); return; } } } catch (IOException e) { LOG.warn("Error while communicating with peer", e); } } if (!downloadedSomething) { LOG.debug("Sleeping until next iteration"); executorService.schedule(this, RETRY_DELAY, TimeUnit.MILLISECONDS); 
} else { LOG.debug("Starting next iteration right away"); executorService.submit(this); } } } }
TorrentLeecher: make it retry after all errors
src/main/java/net/yeputons/spbau/spring2016/torrent/client/TorrentLeecher.java
TorrentLeecher: make it retry after all errors
<ide><path>rc/main/java/net/yeputons/spbau/spring2016/torrent/client/TorrentLeecher.java <ide> try { <ide> sources = tracker.makeRequest(new SourcesRequest(fileId)); <ide> } catch (IOException e) { <del> LOG.error("Unable to request sources from tracker", e); <add> LOG.error("Unable to request sources from tracker, will retry", e); <add> executorService.schedule(this, RETRY_DELAY, TimeUnit.MILLISECONDS); <ide> return; <ide> } <ide> Collections.shuffle(sources); <ide> downloadedSomething = true; <ide> break loopForSources; <ide> } catch (IOException e) { <del> LOG.error("Error while saving file", e); <add> LOG.error("Error while saving file, will retry", e); <add> executorService.schedule(this, RETRY_DELAY, TimeUnit.MILLISECONDS); <ide> return; <ide> } <ide> }
Java
mit
26991e6217a8f69b4626359ce7a9adc96a96bb58
0
ovr/phpinspectionsea-mirror,ovr/phpinspectionsea-mirror,ovr/phpinspectionsea-mirror
package com.kalessil.phpStorm.phpInspectionsEA; import com.intellij.codeInspection.InspectionToolProvider; /* ===Release notes=== AlterInForeachInspection: deactivated by default needs to be complemented with reference mismatch inspection ForeachSourceInspector: deactivated to default due to lots of issues with it NotOptimalIfConditionsInspection: type check functions handling changed, reduced amount of false-positives, some string/null/number will be recovered in 1.2.x StrlenInEmptyStringCheckContextInspection: was one of first inspections, rewritten completely AliasFunctionsUsageInspector: rewind removed (AliasFunctionsFixer feedback) DefaultValueInElseBranchInspector: fixed false positives on array element addition (fixed #35), warning level lowered to weak warning TypeUnsafeArraySearchInspection: when 1st parameter is string show different message (fixed #29) StrStr, ArraySearch: complete ===TODO===: NotOptimalIfConditionsInspection: null/string/number comparison with variable/property - additional check in costs analysis + own messages NotOptimalIfConditionsInspection (increment to 1.2.0): dedicate all comparisons to separate inspection, specialized in logical bugs. e.g. null/instanceof combination. ===POOL=== Regex semantics lookup [\x] => \x [0-9] => \d [seg][seq]... => [seq]{N} [seg][seq]+ => [seq]{2,} [seg][seq]* => [seq]+ [seg][seq]? => [seq]{1,2} [:class:] => \x /^text/ => strpos === 0 /text/ => strpos !== false $cookies[count($cookies) - 1] - replacement is 'end(...)', but it changes internal pointer in array, so can introduce side-effects in loops - legal in unset context (1 ... 
n parameters) ctype_alnum|ctype_alpha vs regular expressions test - challenge is polymorphic pattern recognition current(array_keys(...)) => key(), rare case AdditionOperationOnArraysInspection: - re-implement to check any of binary/mathematical operations has been applied on an array StaticInvocationViaThisInspector: - static calls on any objects, not only this (may be quite heavy due to index lookup) Empty functions/methods: - stubs, design issues Empty try/catch - bad code, like no scream 'For' loops, array_walk with closure: use foreach instead Magic numbers: needs additional research here Confusing construct: BO ? bool|BO : BO|bool PHP 5 migration: reflection API usage (ReflectionClass): constant, is_a, method_exists, property_exists, is_subclass_of are from PHP 4 world and not dealing with traits, annotations and so on. Mark deprecated. */ public class PhpInspectionsEAProvider implements InspectionToolProvider { @Override public Class[] getInspectionClasses() { return new Class[]{}; } }
src/com/kalessil/phpStorm/phpInspectionsEA/PhpInspectionsEAProvider.java
package com.kalessil.phpStorm.phpInspectionsEA; import com.intellij.codeInspection.InspectionToolProvider; /* ===Release notes=== ===TODO===: NotOptimalIfConditionsInspection: null/string/number comparison with variable/property - additional check in costs analysis + own messages NotOptimalIfConditionsInspection (increment to 1.2.0): dedicate all comparisons to separate inspection, specialized in logical bugs. e.g. null/instanceof combination. ===POOL=== Regex semantics lookup [\x] => \x [0-9] => \d [seg][seq]... => [seq]{N} [seg][seq]+ => [seq]{2,} [seg][seq]* => [seq]+ [seg][seq]? => [seq]{1,2} [:class:] => \x /^text/ => strpos === 0 /text/ => strpos !== false $cookies[count($cookies) - 1] - replacement is 'end(...)', but it changes internal pointer in array, so can introduce side-effects in loops - legal in unset context (1 ... n parameters) ctype_alnum|ctype_alpha vs regular expressions test - challenge is polymorphic pattern recognition current(array_keys(...)) => key(), rare case AdditionOperationOnArraysInspection: - re-implement to check any of binary/mathematical operations has been applied on an array StaticInvocationViaThisInspector: - static calls on any objects, not only this (may be quite heavy due to index lookup) Empty functions/methods: - stubs, design issues Empty try/catch - bad code, like no scream 'For' loops, array_walk with closure: use foreach instead Magic numbers: needs additional research here Confusing construct: BO ? bool|BO : BO|bool PHP 5 migration: reflection API usage (ReflectionClass): constant, is_a, method_exists, property_exists, is_subclass_of are from PHP 4 world and not dealing with traits, annotations and so on. Mark deprecated. */ public class PhpInspectionsEAProvider implements InspectionToolProvider { @Override public Class[] getInspectionClasses() { return new Class[]{}; } }
Release notes pre-collected
src/com/kalessil/phpStorm/phpInspectionsEA/PhpInspectionsEAProvider.java
Release notes pre-collected
<ide><path>rc/com/kalessil/phpStorm/phpInspectionsEA/PhpInspectionsEAProvider.java <ide> <ide> /* <ide> ===Release notes=== <add> <add>AlterInForeachInspection: deactivated by default needs to be complemented with reference mismatch inspection <add>ForeachSourceInspector: deactivated to default due to lots of issues with it <add> <add>NotOptimalIfConditionsInspection: type check functions handling changed, reduced amount of false-positives, some string/null/number will be recovered in 1.2.x <add>StrlenInEmptyStringCheckContextInspection: was one of first inspections, rewritten completely <add>AliasFunctionsUsageInspector: rewind removed (AliasFunctionsFixer feedback) <add>DefaultValueInElseBranchInspector: fixed false positives on array element addition (fixed #35), warning level lowered to weak warning <add>TypeUnsafeArraySearchInspection: when 1st parameter is string show different message (fixed #29) <add> <add>StrStr, ArraySearch: complete <ide> <ide> ===TODO===: <ide>
Java
apache-2.0
1f62ab05727bdf1e4c374f80f56db3bb5549daf0
0
karamelchef/karamel,karamelchef/karamel,karamelchef/karamel,karamelchef/karamel,karamelchef/karamel
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package se.kth.karamel.backend.machines; import java.io.IOException; import java.io.SequenceInputStream; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeUnit; import net.schmizz.sshj.SSHClient; import net.schmizz.sshj.connection.ConnectionException; import net.schmizz.sshj.connection.channel.direct.Session; import net.schmizz.sshj.transport.TransportException; import net.schmizz.sshj.transport.verification.PromiscuousVerifier; import net.schmizz.sshj.userauth.keyprovider.KeyProvider; import org.apache.log4j.Logger; import se.kth.karamel.backend.running.model.MachineRuntime; import se.kth.karamel.backend.running.model.tasks.ShellCommand; import se.kth.karamel.backend.running.model.tasks.Task; import se.kth.karamel.backend.running.model.tasks.Task.Status; import se.kth.karamel.common.Settings; import se.kth.karamel.common.exception.KaramelException; import org.bouncycastle.jce.provider.BouncyCastleProvider; import java.security.Security; import net.schmizz.sshj.userauth.UserAuthException; import se.kth.karamel.backend.LogService; import se.kth.karamel.backend.running.model.ClusterRuntime; import se.kth.karamel.backend.running.model.Failure; /** * * @author kamal */ public class SshMachine implements Runnable { static { Security.addProvider(new BouncyCastleProvider()); } private static final Logger logger = Logger.getLogger(SshMachine.class); private final MachineRuntime machineEntity; private final String serverPubKey; private final String serverPrivateKey; private SSHClient client; private long lastHeartbeat = 0; private final BlockingQueue<Task> taskQueue = new ArrayBlockingQueue<>(Settings.MACHINES_TASKQUEUE_SIZE); private boolean stopping = false; private SshShell shell; public 
SshMachine(MachineRuntime machineEntity, String serverPubKey, String serverPrivateKey) { this.machineEntity = machineEntity; this.serverPubKey = serverPubKey; this.serverPrivateKey = serverPrivateKey; this.shell = new SshShell(serverPrivateKey, serverPubKey, machineEntity.getPublicIp(), machineEntity.getSshUser(), machineEntity.getSshPort()); } public MachineRuntime getMachineEntity() { return machineEntity; } public SshShell getShell() { return shell; } public void setStopping(boolean stopping) { this.stopping = stopping; } public void pause() { if (machineEntity.getTasksStatus().ordinal() < MachineRuntime.TasksStatus.PAUSING.ordinal()) { machineEntity.setTasksStatus(MachineRuntime.TasksStatus.PAUSING, null, null); } } public void resume() { if (machineEntity.getTasksStatus() != MachineRuntime.TasksStatus.FAILED) { machineEntity.setTasksStatus(MachineRuntime.TasksStatus.ONGOING, null, null); } } @Override public void run() { logger.info(String.format("Started SSH_Machine to '%s' d'-'", machineEntity.getId())); try { while (true && !stopping) { try { if (machineEntity.getLifeStatus() == MachineRuntime.LifeStatus.CONNECTED && machineEntity.getTasksStatus() == MachineRuntime.TasksStatus.ONGOING) { Task task = null; try { logger.debug("Going to take a task from the queue"); task = taskQueue.take(); logger.debug(String.format("Task was taken from the queue.. 
'%s'", task.getName())); runTask(task); } catch (InterruptedException ex) { if (stopping) { logger.info(String.format("Stopping SSH_Machine to '%s'", machineEntity.getId())); return; } else { logger.error("Got interrupted without having recieved stopping signal"); } } } else { if (machineEntity.getTasksStatus() == MachineRuntime.TasksStatus.PAUSING) { machineEntity.setTasksStatus(MachineRuntime.TasksStatus.PAUSED, null, null); } try { Thread.sleep(Settings.MACHINE_TASKRUNNER_BUSYWAITING_INTERVALS); } catch (InterruptedException ex) { if (!stopping) { logger.error("Got interrupted without having recieved stopping signal"); } } } } catch (Exception e) { logger.error("", e); } } } finally { disconnect(); } } public void enqueue(Task task) throws KaramelException { logger.debug(String.format("Queuing '%s'", task.toString())); try { taskQueue.put(task); task.queued(); } catch (InterruptedException ex) { String message = String.format("Couldn't queue task '%s' on machine '%s'", task.getName(), machineEntity.getId()); task.failed(message); throw new KaramelException(message, ex); } } private void runTask(Task task) { try { task.started(); List<ShellCommand> commands = task.getCommands(); for (ShellCommand cmd : commands) { if (cmd.getStatus() != ShellCommand.Status.DONE) { runSshCmd(cmd, task); if (cmd.getStatus() != ShellCommand.Status.DONE) { task.failed(String.format("Incompleted command '%s", cmd.getCmdStr())); break; } } } if (task.getStatus() == Status.ONGOING) { task.succeed(); } } catch (Exception ex) { task.failed(ex.getMessage()); } } private void runSshCmd(ShellCommand shellCommand, Task task) { shellCommand.setStatus(ShellCommand.Status.ONGOING); Session session = null; try { logger.info(machineEntity.getId() + " => " + shellCommand.getCmdStr()); session = client.startSession(); Session.Command cmd = session.exec(shellCommand.getCmdStr()); cmd.join(60 * 24, TimeUnit.MINUTES); updateHeartbeat(); if (cmd.getExitStatus() != 0) { 
shellCommand.setStatus(ShellCommand.Status.FAILED); } else { shellCommand.setStatus(ShellCommand.Status.DONE); } SequenceInputStream sequenceInputStream = new SequenceInputStream(cmd.getInputStream(), cmd.getErrorStream()); LogService.serializeTaskLog(task, machineEntity.getPublicIp(), sequenceInputStream); } catch (ConnectionException | TransportException ex) { if (getMachineEntity().getGroup().getCluster().getPhase() != ClusterRuntime.ClusterPhases.PURGING) { logger.error(String.format("Couldn't excecute command on client '%s' ", machineEntity.getId()), ex); } } finally { if (session != null) { try { session.close(); } catch (TransportException | ConnectionException ex) { logger.error(String.format("Couldn't close ssh session to '%s' ", machineEntity.getId()), ex); } } } } private boolean connect() throws KaramelException { try { KeyProvider keys = null; client = new SSHClient(); client.addHostKeyVerifier(new PromiscuousVerifier()); client.setConnectTimeout(Settings.SSH_CONNECTION_TIMEOUT); client.setTimeout(Settings.SSH_SESSION_TIMEOUT); keys = client.loadKeys(serverPrivateKey, serverPubKey, null); logger.info(String.format("connecting to '%s'...", machineEntity.getId())); try { client.connect(machineEntity.getPublicIp(), machineEntity.getSshPort()); } catch (IOException ex) { logger.warn(String.format("Opps!! coudln't connect to '%s' :@", machineEntity.getId())); logger.debug(ex); } if (client.isConnected()) { logger.info(String.format("Yey!! connected to '%s' ^-^", machineEntity.getId())); machineEntity.getGroup().getCluster().resolveFailure(Failure.hash(Failure.Type.SSH_KEY_NOT_AUTH, machineEntity.getPublicIp())); client.authPublickey(machineEntity.getSshUser(), keys); machineEntity.setLifeStatus(MachineRuntime.LifeStatus.CONNECTED); return true; } else { logger.error(String.format("Mehh!! 
no connection to '%s', is the port '%d' open?", machineEntity.getId(), machineEntity.getSshPort())); machineEntity.setLifeStatus(MachineRuntime.LifeStatus.UNREACHABLE); return false; } } catch (UserAuthException ex) { String message = "Issue for using ssh keys, make sure you keypair is not password protected.."; KaramelException exp = new KaramelException(message, ex); machineEntity.getGroup().getCluster().issueFailure(new Failure(Failure.Type.SSH_KEY_NOT_AUTH, machineEntity.getPublicIp(), message)); throw exp; } catch (IOException e) { throw new KaramelException(e); } } public void disconnect() { logger.info(String.format("Closing ssh session to '%s'", machineEntity.getId())); try { if (client != null && client.isConnected()) { client.close(); } } catch (IOException ex) { } } public boolean ping() throws KaramelException { if (lastHeartbeat < System.currentTimeMillis() - Settings.SSH_PING_INTERVAL) { if (client != null && client.isConnected()) { updateHeartbeat(); return true; } else { return connect(); } } else { return true; } } private void updateHeartbeat() { lastHeartbeat = System.currentTimeMillis(); } }
karamel-core/src/main/java/se/kth/karamel/backend/machines/SshMachine.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package se.kth.karamel.backend.machines; import com.google.gson.JsonArray; import com.google.gson.JsonParseException; import com.google.gson.JsonParser; import com.google.gson.stream.JsonReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.SequenceInputStream; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeUnit; import net.schmizz.sshj.SSHClient; import net.schmizz.sshj.connection.ConnectionException; import net.schmizz.sshj.connection.channel.direct.Session; import net.schmizz.sshj.transport.TransportException; import net.schmizz.sshj.transport.verification.PromiscuousVerifier; import net.schmizz.sshj.userauth.keyprovider.KeyProvider; import org.apache.log4j.Logger; import se.kth.karamel.backend.running.model.MachineRuntime; import se.kth.karamel.backend.running.model.tasks.ShellCommand; import se.kth.karamel.backend.running.model.tasks.Task; import se.kth.karamel.backend.running.model.tasks.Task.Status; import se.kth.karamel.common.Settings; import se.kth.karamel.common.exception.KaramelException; import org.bouncycastle.jce.provider.BouncyCastleProvider; import java.security.Security; import java.util.concurrent.ConcurrentHashMap; import net.schmizz.sshj.userauth.UserAuthException; import net.schmizz.sshj.xfer.scp.SCPFileTransfer; import se.kth.karamel.backend.LogService; import se.kth.karamel.backend.running.model.ClusterRuntime; import se.kth.karamel.backend.running.model.Failure; import se.kth.karamel.backend.running.model.tasks.RunRecipeTask; /** * * @author kamal */ public class SshMachine implements Runnable { static { Security.addProvider(new BouncyCastleProvider()); } private static final Logger logger = 
Logger.getLogger(SshMachine.class); private final MachineRuntime machineEntity; private final String serverPubKey; private final String serverPrivateKey; private SSHClient client; private long lastHeartbeat = 0; private final BlockingQueue<Task> taskQueue = new ArrayBlockingQueue<>(Settings.MACHINES_TASKQUEUE_SIZE); private boolean stopping = false; private SshShell shell; private final ConcurrentHashMap<RunRecipeTask, JsonArray> resultsMap = new ConcurrentHashMap<>(); public SshMachine(MachineRuntime machineEntity, String serverPubKey, String serverPrivateKey) { this.machineEntity = machineEntity; this.serverPubKey = serverPubKey; this.serverPrivateKey = serverPrivateKey; this.shell = new SshShell(serverPrivateKey, serverPubKey, machineEntity.getPublicIp(), machineEntity.getSshUser(), machineEntity.getSshPort()); } public MachineRuntime getMachineEntity() { return machineEntity; } public SshShell getShell() { return shell; } public void setStopping(boolean stopping) { this.stopping = stopping; } public void pause() { if (machineEntity.getTasksStatus().ordinal() < MachineRuntime.TasksStatus.PAUSING.ordinal()) { machineEntity.setTasksStatus(MachineRuntime.TasksStatus.PAUSING, null, null); } } public void resume() { if (machineEntity.getTasksStatus() != MachineRuntime.TasksStatus.FAILED) { machineEntity.setTasksStatus(MachineRuntime.TasksStatus.ONGOING, null, null); } } @Override public void run() { logger.info(String.format("Started SSH_Machine to '%s' d'-'", machineEntity.getId())); try { while (true && !stopping) { try { if (machineEntity.getLifeStatus() == MachineRuntime.LifeStatus.CONNECTED && machineEntity.getTasksStatus() == MachineRuntime.TasksStatus.ONGOING) { Task task = null; try { logger.debug("Going to take a task from the queue"); task = taskQueue.take(); logger.debug(String.format("Task was taken from the queue.. 
'%s'", task.getName())); JsonArray res = runTask(task); if (res != null) { RunRecipeTask rrt = (RunRecipeTask) task; // TODO - how to pass on return values to update the dag.... resultsMap.put(rrt, res); } } catch (InterruptedException ex) { if (stopping) { logger.info(String.format("Stopping SSH_Machine to '%s'", machineEntity.getId())); return; } else { logger.error("Got interrupted without having recieved stopping signal"); } } } else { if (machineEntity.getTasksStatus() == MachineRuntime.TasksStatus.PAUSING) { machineEntity.setTasksStatus(MachineRuntime.TasksStatus.PAUSED, null, null); } try { Thread.sleep(Settings.MACHINE_TASKRUNNER_BUSYWAITING_INTERVALS); } catch (InterruptedException ex) { if (!stopping) { logger.error("Got interrupted without having recieved stopping signal"); } } } } catch (Exception e) { logger.error("", e); } } } finally { disconnect(); } } public void enqueue(Task task) throws KaramelException { logger.debug(String.format("Queuing '%s'", task.toString())); try { taskQueue.put(task); task.queued(); } catch (InterruptedException ex) { String message = String.format("Couldn't queue task '%s' on machine '%s'", task.getName(), machineEntity.getId()); task.failed(message); throw new KaramelException(message, ex); } } private synchronized JsonArray runTask(Task task) { JsonArray res = null; try { task.started(); List<ShellCommand> commands = task.getCommands(); for (ShellCommand cmd : commands) { if (cmd.getStatus() != ShellCommand.Status.DONE) { res = runSshCmd(cmd, task); if (cmd.getStatus() != ShellCommand.Status.DONE) { task.failed(String.format("Incompleted command '%s", cmd.getCmdStr())); break; } } } if (task.getStatus() == Status.ONGOING) { task.succeed(); } } catch (Exception ex) { task.failed(ex.getMessage()); } return res; } private synchronized JsonArray runSshCmd(ShellCommand shellCommand, Task task) { shellCommand.setStatus(ShellCommand.Status.ONGOING); Session session = null; JsonArray res = null; try { //SesshionChannle logs 
the same thing logger.info(machineEntity.getId() + " => " + shellCommand.getCmdStr()); session = client.startSession(); Session.Command cmd = session.exec(shellCommand.getCmdStr()); cmd.join(60 * 24, TimeUnit.MINUTES); updateHeartbeat(); if (cmd.getExitStatus() != 0) { shellCommand.setStatus(ShellCommand.Status.FAILED); } else { shellCommand.setStatus(ShellCommand.Status.DONE); if (task instanceof RunRecipeTask) { RunRecipeTask rrt = (RunRecipeTask) task; try { JsonArray results = downloadResultsScp(rrt.getCookbookName(), rrt.getRecipeCanonicalName()); rrt.setResults(results); res = results; } catch (JsonParseException p) { logger.error("Bug in Chef Cookbook - Results were not a valid json document: " + rrt.getCookbookName()+ "::" + rrt.getRecipeCanonicalName()); rrt.setResults(null); } catch (IOException e) { logger.error("Possible network problem. No results were able to be downloaded for: " + rrt.getCookbookName()+ "::" + rrt.getRecipeCanonicalName()); rrt.setResults(null); } } } SequenceInputStream sequenceInputStream = new SequenceInputStream(cmd.getInputStream(), cmd.getErrorStream()); LogService.serializeTaskLog(task, machineEntity.getPublicIp(), sequenceInputStream); } catch (ConnectionException | TransportException ex) { if (getMachineEntity().getGroup().getCluster().getPhase() != ClusterRuntime.ClusterPhases.PURGING) { logger.error(String.format("Couldn't excecute command on client '%s' ", machineEntity.getId()), ex); } } finally { if (session != null) { try { client.close(); } catch (IOException ex) { } } } return res; } private boolean connect() throws KaramelException { try { KeyProvider keys = null; client = new SSHClient(); client.addHostKeyVerifier(new PromiscuousVerifier()); client.setConnectTimeout(Settings.SSH_CONNECTION_TIMEOUT); client.setTimeout(Settings.SSH_SESSION_TIMEOUT); keys = client.loadKeys(serverPrivateKey, serverPubKey, null); logger.info(String.format("connecting to '%s'...", machineEntity.getId())); try { 
client.connect(machineEntity.getPublicIp(), machineEntity.getSshPort()); } catch (IOException ex) { logger.warn(String.format("Opps!! coudln't connect to '%s' :@", machineEntity.getId())); logger.debug(ex); } if (client.isConnected()) { logger.info(String.format("Yey!! connected to '%s' ^-^", machineEntity.getId())); machineEntity.getGroup().getCluster().resolveFailure(Failure.hash(Failure.Type.SSH_KEY_NOT_AUTH, machineEntity.getPublicIp())); client.authPublickey(machineEntity.getSshUser(), keys); machineEntity.setLifeStatus(MachineRuntime.LifeStatus.CONNECTED); return true; } else { logger.error(String.format("Mehh!! no connection to '%s', is the port '%d' open?", machineEntity.getId(), machineEntity.getSshPort())); machineEntity.setLifeStatus(MachineRuntime.LifeStatus.UNREACHABLE); return false; } } catch (UserAuthException ex) { String message = "Issue for using ssh keys, make sure you keypair is not password protected.."; KaramelException exp = new KaramelException(message, ex); machineEntity.getGroup().getCluster().issueFailure(new Failure(Failure.Type.SSH_KEY_NOT_AUTH, machineEntity.getPublicIp(), message)); throw exp; } catch (IOException e) { throw new KaramelException(e); } } public void disconnect() { logger.info(String.format("Closing ssh session to '%s'", machineEntity.getId())); try { if (client != null && client.isConnected()) { client.close(); } } catch (IOException ex) { } } /** * * @param rrt * @return null if the Map doesn't contain a result for this RunRecipeTask, * otherwise a JsonArray */ public JsonArray getRecipeResult(RunRecipeTask rrt) { JsonArray res = resultsMap.get(rrt); if (res != null) { // remove result from Map if found resultsMap.remove(rrt); } return res; } /** * http://unix.stackexchange.com/questions/136165/java-code-to-copy-files-from-one-linux-machine-to-another-linux-machine * * @param session * @param cookbook * @param recipe */ private synchronized JsonArray downloadResultsScp(String cookbook, String recipe) throws 
IOException { String remoteFile = "~/" + cookbook + "__" + recipe + ".out"; SCPFileTransfer scp = client.newSCPFileTransfer(); String localResultsFile = Settings.KARAMEL_TMP_PATH + File.separator + cookbook + "__" + recipe + ".out"; File f = new File(localResultsFile); // TODO - should move this to some initialization method f.mkdirs(); if (f.exists()) { f.delete(); } // TODO: error checking here... scp.download(remoteFile, localResultsFile); JsonReader reader = new JsonReader(new FileReader(localResultsFile)); JsonParser jsonParser = new JsonParser(); return jsonParser.parse(reader).getAsJsonArray(); } public boolean ping() throws KaramelException { if (lastHeartbeat < System.currentTimeMillis() - Settings.SSH_PING_INTERVAL) { if (client != null && client.isConnected()) { updateHeartbeat(); return true; } else { return connect(); } } else { return true; } } private void updateHeartbeat() { lastHeartbeat = System.currentTimeMillis(); } }
rolled back SshMachine
karamel-core/src/main/java/se/kth/karamel/backend/machines/SshMachine.java
rolled back SshMachine
<ide><path>aramel-core/src/main/java/se/kth/karamel/backend/machines/SshMachine.java <ide> */ <ide> package se.kth.karamel.backend.machines; <ide> <del>import com.google.gson.JsonArray; <del>import com.google.gson.JsonParseException; <del>import com.google.gson.JsonParser; <del>import com.google.gson.stream.JsonReader; <del>import java.io.File; <del>import java.io.FileReader; <ide> import java.io.IOException; <ide> import java.io.SequenceInputStream; <ide> import java.util.List; <ide> import se.kth.karamel.common.exception.KaramelException; <ide> import org.bouncycastle.jce.provider.BouncyCastleProvider; <ide> import java.security.Security; <del>import java.util.concurrent.ConcurrentHashMap; <ide> import net.schmizz.sshj.userauth.UserAuthException; <del>import net.schmizz.sshj.xfer.scp.SCPFileTransfer; <ide> import se.kth.karamel.backend.LogService; <ide> import se.kth.karamel.backend.running.model.ClusterRuntime; <ide> import se.kth.karamel.backend.running.model.Failure; <del>import se.kth.karamel.backend.running.model.tasks.RunRecipeTask; <ide> <ide> /** <ide> * <ide> */ <ide> public class SshMachine implements Runnable { <ide> <del> static { <del> Security.addProvider(new BouncyCastleProvider()); <del> } <del> <del> private static final Logger logger = Logger.getLogger(SshMachine.class); <del> private final MachineRuntime machineEntity; <del> private final String serverPubKey; <del> private final String serverPrivateKey; <del> private SSHClient client; <del> private long lastHeartbeat = 0; <del> private final BlockingQueue<Task> taskQueue = new ArrayBlockingQueue<>(Settings.MACHINES_TASKQUEUE_SIZE); <del> private boolean stopping = false; <del> private SshShell shell; <del> private final ConcurrentHashMap<RunRecipeTask, JsonArray> resultsMap = new ConcurrentHashMap<>(); <del> <del> public SshMachine(MachineRuntime machineEntity, String serverPubKey, String serverPrivateKey) { <del> this.machineEntity = machineEntity; <del> this.serverPubKey = serverPubKey; <del> 
this.serverPrivateKey = serverPrivateKey; <del> this.shell = new SshShell(serverPrivateKey, serverPubKey, machineEntity.getPublicIp(), machineEntity.getSshUser(), machineEntity.getSshPort()); <del> } <del> <del> public MachineRuntime getMachineEntity() { <del> return machineEntity; <del> } <del> <del> public SshShell getShell() { <del> return shell; <del> } <del> <del> public void setStopping(boolean stopping) { <del> this.stopping = stopping; <del> } <del> <del> public void pause() { <del> if (machineEntity.getTasksStatus().ordinal() < MachineRuntime.TasksStatus.PAUSING.ordinal()) { <del> machineEntity.setTasksStatus(MachineRuntime.TasksStatus.PAUSING, null, null); <add> static { <add> Security.addProvider(new BouncyCastleProvider()); <add> } <add> <add> private static final Logger logger = Logger.getLogger(SshMachine.class); <add> private final MachineRuntime machineEntity; <add> private final String serverPubKey; <add> private final String serverPrivateKey; <add> private SSHClient client; <add> private long lastHeartbeat = 0; <add> private final BlockingQueue<Task> taskQueue = new ArrayBlockingQueue<>(Settings.MACHINES_TASKQUEUE_SIZE); <add> private boolean stopping = false; <add> private SshShell shell; <add> <add> public SshMachine(MachineRuntime machineEntity, String serverPubKey, String serverPrivateKey) { <add> this.machineEntity = machineEntity; <add> this.serverPubKey = serverPubKey; <add> this.serverPrivateKey = serverPrivateKey; <add> this.shell = new SshShell(serverPrivateKey, serverPubKey, machineEntity.getPublicIp(), machineEntity.getSshUser(), machineEntity.getSshPort()); <add> } <add> <add> public MachineRuntime getMachineEntity() { <add> return machineEntity; <add> } <add> <add> public SshShell getShell() { <add> return shell; <add> } <add> <add> public void setStopping(boolean stopping) { <add> this.stopping = stopping; <add> } <add> <add> public void pause() { <add> if (machineEntity.getTasksStatus().ordinal() < 
MachineRuntime.TasksStatus.PAUSING.ordinal()) { <add> machineEntity.setTasksStatus(MachineRuntime.TasksStatus.PAUSING, null, null); <add> } <add> } <add> <add> public void resume() { <add> if (machineEntity.getTasksStatus() != MachineRuntime.TasksStatus.FAILED) { <add> machineEntity.setTasksStatus(MachineRuntime.TasksStatus.ONGOING, null, null); <add> } <add> } <add> <add> @Override <add> public void run() { <add> logger.info(String.format("Started SSH_Machine to '%s' d'-'", machineEntity.getId())); <add> try { <add> while (true && !stopping) { <add> try { <add> if (machineEntity.getLifeStatus() == MachineRuntime.LifeStatus.CONNECTED <add> && machineEntity.getTasksStatus() == MachineRuntime.TasksStatus.ONGOING) { <add> Task task = null; <add> try { <add> logger.debug("Going to take a task from the queue"); <add> task = taskQueue.take(); <add> logger.debug(String.format("Task was taken from the queue.. '%s'", task.getName())); <add> runTask(task); <add> } catch (InterruptedException ex) { <add> if (stopping) { <add> logger.info(String.format("Stopping SSH_Machine to '%s'", machineEntity.getId())); <add> return; <add> } else { <add> logger.error("Got interrupted without having recieved stopping signal"); <add> } <add> } <add> } else { <add> if (machineEntity.getTasksStatus() == MachineRuntime.TasksStatus.PAUSING) { <add> machineEntity.setTasksStatus(MachineRuntime.TasksStatus.PAUSED, null, null); <add> } <add> try { <add> Thread.sleep(Settings.MACHINE_TASKRUNNER_BUSYWAITING_INTERVALS); <add> } catch (InterruptedException ex) { <add> if (!stopping) { <add> logger.error("Got interrupted without having recieved stopping signal"); <add> } <add> } <add> } <add> } catch (Exception e) { <add> logger.error("", e); <ide> } <del> } <del> <del> public void resume() { <del> if (machineEntity.getTasksStatus() != MachineRuntime.TasksStatus.FAILED) { <del> machineEntity.setTasksStatus(MachineRuntime.TasksStatus.ONGOING, null, null); <add> } <add> } finally { <add> disconnect(); 
<add> } <add> } <add> <add> public void enqueue(Task task) throws KaramelException { <add> logger.debug(String.format("Queuing '%s'", task.toString())); <add> try { <add> taskQueue.put(task); <add> task.queued(); <add> } catch (InterruptedException ex) { <add> String message = String.format("Couldn't queue task '%s' on machine '%s'", task.getName(), machineEntity.getId()); <add> task.failed(message); <add> throw new KaramelException(message, ex); <add> } <add> } <add> <add> private void runTask(Task task) { <add> try { <add> task.started(); <add> List<ShellCommand> commands = task.getCommands(); <add> <add> for (ShellCommand cmd : commands) { <add> if (cmd.getStatus() != ShellCommand.Status.DONE) { <add> runSshCmd(cmd, task); <add> if (cmd.getStatus() != ShellCommand.Status.DONE) { <add> task.failed(String.format("Incompleted command '%s", cmd.getCmdStr())); <add> break; <add> } <ide> } <del> } <del> <del> @Override <del> public void run() { <del> logger.info(String.format("Started SSH_Machine to '%s' d'-'", machineEntity.getId())); <add> } <add> if (task.getStatus() == Status.ONGOING) { <add> task.succeed(); <add> } <add> } catch (Exception ex) { <add> task.failed(ex.getMessage()); <add> } <add> } <add> <add> private void runSshCmd(ShellCommand shellCommand, Task task) { <add> shellCommand.setStatus(ShellCommand.Status.ONGOING); <add> Session session = null; <add> try { <add> logger.info(machineEntity.getId() + " => " + shellCommand.getCmdStr()); <add> <add> session = client.startSession(); <add> Session.Command cmd = session.exec(shellCommand.getCmdStr()); <add> cmd.join(60 * 24, TimeUnit.MINUTES); <add> updateHeartbeat(); <add> if (cmd.getExitStatus() != 0) { <add> shellCommand.setStatus(ShellCommand.Status.FAILED); <add> } else { <add> shellCommand.setStatus(ShellCommand.Status.DONE); <add> } <add> SequenceInputStream sequenceInputStream = new SequenceInputStream(cmd.getInputStream(), cmd.getErrorStream()); <add> LogService.serializeTaskLog(task, 
machineEntity.getPublicIp(), sequenceInputStream); <add> <add> } catch (ConnectionException | TransportException ex) { <add> if (getMachineEntity().getGroup().getCluster().getPhase() != ClusterRuntime.ClusterPhases.PURGING) { <add> logger.error(String.format("Couldn't excecute command on client '%s' ", machineEntity.getId()), ex); <add> } <add> } finally { <add> if (session != null) { <ide> try { <del> while (true && !stopping) { <del> try { <del> if (machineEntity.getLifeStatus() == MachineRuntime.LifeStatus.CONNECTED <del> && machineEntity.getTasksStatus() == MachineRuntime.TasksStatus.ONGOING) { <del> Task task = null; <del> try { <del> logger.debug("Going to take a task from the queue"); <del> task = taskQueue.take(); <del> logger.debug(String.format("Task was taken from the queue.. '%s'", task.getName())); <del> JsonArray res = runTask(task); <del> if (res != null) { <del> RunRecipeTask rrt = (RunRecipeTask) task; <del> // TODO - how to pass on return values to update the dag.... <del> resultsMap.put(rrt, res); <del> } <del> <del> } catch (InterruptedException ex) { <del> if (stopping) { <del> logger.info(String.format("Stopping SSH_Machine to '%s'", machineEntity.getId())); <del> return; <del> } else { <del> logger.error("Got interrupted without having recieved stopping signal"); <del> } <del> } <del> } else { <del> if (machineEntity.getTasksStatus() == MachineRuntime.TasksStatus.PAUSING) { <del> machineEntity.setTasksStatus(MachineRuntime.TasksStatus.PAUSED, null, null); <del> } <del> try { <del> Thread.sleep(Settings.MACHINE_TASKRUNNER_BUSYWAITING_INTERVALS); <del> } catch (InterruptedException ex) { <del> if (!stopping) { <del> logger.error("Got interrupted without having recieved stopping signal"); <del> } <del> } <del> } <del> } catch (Exception e) { <del> logger.error("", e); <del> } <del> } <del> } finally { <del> disconnect(); <add> session.close(); <add> } catch (TransportException | ConnectionException ex) { <add> 
logger.error(String.format("Couldn't close ssh session to '%s' ", machineEntity.getId()), ex); <ide> } <del> } <del> <del> public void enqueue(Task task) throws KaramelException { <del> logger.debug(String.format("Queuing '%s'", task.toString())); <del> try { <del> taskQueue.put(task); <del> task.queued(); <del> } catch (InterruptedException ex) { <del> String message = String.format("Couldn't queue task '%s' on machine '%s'", task.getName(), machineEntity.getId()); <del> task.failed(message); <del> throw new KaramelException(message, ex); <del> } <del> } <del> <del> private synchronized JsonArray runTask(Task task) { <del> JsonArray res = null; <del> try { <del> task.started(); <del> List<ShellCommand> commands = task.getCommands(); <del> <del> for (ShellCommand cmd : commands) { <del> if (cmd.getStatus() != ShellCommand.Status.DONE) { <del> res = runSshCmd(cmd, task); <del> if (cmd.getStatus() != ShellCommand.Status.DONE) { <del> task.failed(String.format("Incompleted command '%s", cmd.getCmdStr())); <del> break; <del> } <del> } <del> } <del> if (task.getStatus() == Status.ONGOING) { <del> task.succeed(); <del> } <del> } catch (Exception ex) { <del> task.failed(ex.getMessage()); <del> } <del> return res; <del> } <del> <del> private synchronized JsonArray runSshCmd(ShellCommand shellCommand, Task task) { <del> shellCommand.setStatus(ShellCommand.Status.ONGOING); <del> Session session = null; <del> JsonArray res = null; <del> try { <del> //SesshionChannle logs the same thing <del> logger.info(machineEntity.getId() + " => " + shellCommand.getCmdStr()); <del> <del> session = client.startSession(); <del> Session.Command cmd = session.exec(shellCommand.getCmdStr()); <del> cmd.join(60 * 24, TimeUnit.MINUTES); <del> updateHeartbeat(); <del> if (cmd.getExitStatus() != 0) { <del> shellCommand.setStatus(ShellCommand.Status.FAILED); <del> } else { <del> shellCommand.setStatus(ShellCommand.Status.DONE); <del> if (task instanceof RunRecipeTask) { <del> RunRecipeTask rrt = 
(RunRecipeTask) task; <del> try { <del> JsonArray results = downloadResultsScp(rrt.getCookbookName(), rrt.getRecipeCanonicalName()); <del> rrt.setResults(results); <del> res = results; <del> } catch (JsonParseException p) { <del> logger.error("Bug in Chef Cookbook - Results were not a valid json document: " <del> + rrt.getCookbookName()+ "::" + rrt.getRecipeCanonicalName()); <del> rrt.setResults(null); <del> } catch (IOException e) { <del> logger.error("Possible network problem. No results were able to be downloaded for: " <del> + rrt.getCookbookName()+ "::" + rrt.getRecipeCanonicalName()); <del> rrt.setResults(null); <del> } <del> } <del> <del> } <del> <del> SequenceInputStream sequenceInputStream = new SequenceInputStream(cmd.getInputStream(), cmd.getErrorStream()); <del> LogService.serializeTaskLog(task, machineEntity.getPublicIp(), sequenceInputStream); <del> <del> } catch (ConnectionException | TransportException ex) { <del> if (getMachineEntity().getGroup().getCluster().getPhase() != ClusterRuntime.ClusterPhases.PURGING) { <del> logger.error(String.format("Couldn't excecute command on client '%s' ", machineEntity.getId()), ex); <del> } <del> } finally { <del> if (session != null) { <del> try { <del> client.close(); <del> } catch (IOException ex) { <del> } <del> } <del> } <del> return res; <del> } <del> <del> private boolean connect() throws KaramelException { <del> try { <del> KeyProvider keys = null; <del> client = new SSHClient(); <del> client.addHostKeyVerifier(new PromiscuousVerifier()); <del> client.setConnectTimeout(Settings.SSH_CONNECTION_TIMEOUT); <del> client.setTimeout(Settings.SSH_SESSION_TIMEOUT); <del> keys = client.loadKeys(serverPrivateKey, serverPubKey, null); <del> logger.info(String.format("connecting to '%s'...", machineEntity.getId())); <del> try { <del> client.connect(machineEntity.getPublicIp(), machineEntity.getSshPort()); <del> } catch (IOException ex) { <del> logger.warn(String.format("Opps!! 
coudln't connect to '%s' :@", machineEntity.getId())); <del> logger.debug(ex); <del> } <del> if (client.isConnected()) { <del> logger.info(String.format("Yey!! connected to '%s' ^-^", machineEntity.getId())); <del> machineEntity.getGroup().getCluster().resolveFailure(Failure.hash(Failure.Type.SSH_KEY_NOT_AUTH, machineEntity.getPublicIp())); <del> client.authPublickey(machineEntity.getSshUser(), keys); <del> machineEntity.setLifeStatus(MachineRuntime.LifeStatus.CONNECTED); <del> return true; <del> } else { <del> logger.error(String.format("Mehh!! no connection to '%s', is the port '%d' open?", machineEntity.getId(), machineEntity.getSshPort())); <del> machineEntity.setLifeStatus(MachineRuntime.LifeStatus.UNREACHABLE); <del> return false; <del> } <del> } catch (UserAuthException ex) { <del> String message = "Issue for using ssh keys, make sure you keypair is not password protected.."; <del> KaramelException exp = new KaramelException(message, ex); <del> machineEntity.getGroup().getCluster().issueFailure(new Failure(Failure.Type.SSH_KEY_NOT_AUTH, machineEntity.getPublicIp(), message)); <del> throw exp; <del> } catch (IOException e) { <del> throw new KaramelException(e); <del> } <del> } <del> <del> public void disconnect() { <del> logger.info(String.format("Closing ssh session to '%s'", machineEntity.getId())); <del> try { <del> if (client != null && client.isConnected()) { <del> client.close(); <del> } <del> } catch (IOException ex) { <del> } <del> } <del> <del> /** <del> * <del> * @param rrt <del> * @return null if the Map doesn't contain a result for this RunRecipeTask, <del> * otherwise a JsonArray <del> */ <del> public JsonArray getRecipeResult(RunRecipeTask rrt) { <del> JsonArray res = resultsMap.get(rrt); <del> if (res != null) { <del> // remove result from Map if found <del> resultsMap.remove(rrt); <del> } <del> return res; <del> } <del> <del> /** <del> * 
http://unix.stackexchange.com/questions/136165/java-code-to-copy-files-from-one-linux-machine-to-another-linux-machine <del> * <del> * @param session <del> * @param cookbook <del> * @param recipe <del> */ <del> private synchronized JsonArray downloadResultsScp(String cookbook, String recipe) throws IOException { <del> String remoteFile = "~/" + cookbook + "__" + recipe + ".out"; <del> SCPFileTransfer scp = client.newSCPFileTransfer(); <del> String localResultsFile = Settings.KARAMEL_TMP_PATH + File.separator + cookbook + "__" + recipe + ".out"; <del> File f = new File(localResultsFile); <del> // TODO - should move this to some initialization method <del> f.mkdirs(); <del> if (f.exists()) { <del> f.delete(); <del> } <del> // TODO: error checking here... <del> scp.download(remoteFile, localResultsFile); <del> JsonReader reader = new JsonReader(new FileReader(localResultsFile)); <del> JsonParser jsonParser = new JsonParser(); <del> return jsonParser.parse(reader).getAsJsonArray(); <del> } <del> <del> public boolean ping() throws KaramelException { <del> if (lastHeartbeat < System.currentTimeMillis() - Settings.SSH_PING_INTERVAL) { <del> if (client != null && client.isConnected()) { <del> updateHeartbeat(); <del> return true; <del> } else { <del> return connect(); <del> } <del> } else { <del> return true; <del> } <del> <del> } <del> <del> private void updateHeartbeat() { <del> lastHeartbeat = System.currentTimeMillis(); <del> } <add> } <add> } <add> } <add> <add> private boolean connect() throws KaramelException { <add> try { <add> KeyProvider keys = null; <add> client = new SSHClient(); <add> client.addHostKeyVerifier(new PromiscuousVerifier()); <add> client.setConnectTimeout(Settings.SSH_CONNECTION_TIMEOUT); <add> client.setTimeout(Settings.SSH_SESSION_TIMEOUT); <add> keys = client.loadKeys(serverPrivateKey, serverPubKey, null); <add> logger.info(String.format("connecting to '%s'...", machineEntity.getId())); <add> try { <add> 
client.connect(machineEntity.getPublicIp(), machineEntity.getSshPort()); <add> } catch (IOException ex) { <add> logger.warn(String.format("Opps!! coudln't connect to '%s' :@", machineEntity.getId())); <add> logger.debug(ex); <add> } <add> if (client.isConnected()) { <add> logger.info(String.format("Yey!! connected to '%s' ^-^", machineEntity.getId())); <add> machineEntity.getGroup().getCluster().resolveFailure(Failure.hash(Failure.Type.SSH_KEY_NOT_AUTH, machineEntity.getPublicIp())); <add> client.authPublickey(machineEntity.getSshUser(), keys); <add> machineEntity.setLifeStatus(MachineRuntime.LifeStatus.CONNECTED); <add> return true; <add> } else { <add> logger.error(String.format("Mehh!! no connection to '%s', is the port '%d' open?", machineEntity.getId(), machineEntity.getSshPort())); <add> machineEntity.setLifeStatus(MachineRuntime.LifeStatus.UNREACHABLE); <add> return false; <add> } <add> } catch (UserAuthException ex) { <add> String message = "Issue for using ssh keys, make sure you keypair is not password protected.."; <add> KaramelException exp = new KaramelException(message, ex); <add> machineEntity.getGroup().getCluster().issueFailure(new Failure(Failure.Type.SSH_KEY_NOT_AUTH, machineEntity.getPublicIp(), message)); <add> throw exp; <add> } catch (IOException e) { <add> throw new KaramelException(e); <add> } <add> } <add> <add> public void disconnect() { <add> logger.info(String.format("Closing ssh session to '%s'", machineEntity.getId())); <add> try { <add> if (client != null && client.isConnected()) { <add> client.close(); <add> } <add> } catch (IOException ex) { <add> } <add> } <add> <add> public boolean ping() throws KaramelException { <add> if (lastHeartbeat < System.currentTimeMillis() - Settings.SSH_PING_INTERVAL) { <add> if (client != null && client.isConnected()) { <add> updateHeartbeat(); <add> return true; <add> } else { <add> return connect(); <add> } <add> } else { <add> return true; <add> } <add> } <add> <add> private void updateHeartbeat() 
{ <add> lastHeartbeat = System.currentTimeMillis(); <add> } <ide> }
Java
apache-2.0
089e37af7bcc557a7bc0fe5587cb6b72419f8c16
0
GwtMaterialDesign/gwt-material-addins,GwtMaterialDesign/gwt-material-addins,GwtMaterialDesign/gwt-material-addins
/* * #%L * GwtMaterial * %% * Copyright (C) 2015 - 2016 GwtMaterialDesign * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package gwt.material.design.addins.client.fileuploader; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Document; import com.google.gwt.dom.client.Element; import com.google.gwt.event.shared.HandlerRegistration; import com.google.gwt.user.client.DOM; import gwt.material.design.addins.client.MaterialAddins; import gwt.material.design.addins.client.base.constants.AddinsCssName; import gwt.material.design.addins.client.fileuploader.base.HasFileUpload; import gwt.material.design.addins.client.fileuploader.base.UploadFile; import gwt.material.design.addins.client.fileuploader.base.UploadResponse; import gwt.material.design.addins.client.fileuploader.constants.FileMethod; import gwt.material.design.addins.client.fileuploader.events.*; import gwt.material.design.addins.client.fileuploader.js.Dropzone; import gwt.material.design.addins.client.fileuploader.js.File; import gwt.material.design.addins.client.fileuploader.js.JsFileUploaderOptions; import gwt.material.design.client.MaterialDesignBase; import gwt.material.design.client.base.MaterialWidget; import gwt.material.design.client.constants.CssName; import gwt.material.design.client.constants.Display; import gwt.material.design.client.events.*; import gwt.material.design.client.ui.MaterialToast; import gwt.material.design.jquery.client.api.JQueryElement; import java.util.Date; 
import static gwt.material.design.jquery.client.api.JQuery.$; //@formatter:off /** * Custom file uploader with Dnd support with the help of dropzone.js. It has multiple * feature just like the GWT File Uploader core widget. * <p> * <h3>XML Namespace Declaration</h3> * <pre> * {@code * xmlns:ma='urn:import:gwt.material.design.addins.client' * } * </pre> * <p> * <h3>UiBinder Usage:</h3> * <pre> * {@code * <ma:fileuploader.MaterialFileUploader url="/file/upload"/> * } * </pre> * * @author kevzlou7979 * @see <a href="http://gwtmaterialdesign.github.io/gwt-material-demo/#fileuploader">File Uploader</a> */ //@formatter:on public class MaterialFileUploader extends MaterialWidget implements HasFileUpload<UploadFile> { static { if (MaterialAddins.isDebug()) { MaterialDesignBase.injectDebugJs(MaterialFileUploaderDebugClientBundle.INSTANCE.dropzoneJsDebug()); MaterialDesignBase.injectCss(MaterialFileUploaderDebugClientBundle.INSTANCE.dropzoneCssDebug()); } else { MaterialDesignBase.injectJs(MaterialFileUploaderClientBundle.INSTANCE.dropzoneJs()); MaterialDesignBase.injectCss(MaterialFileUploaderClientBundle.INSTANCE.dropzoneCss()); } } private boolean preview = true; private boolean initialize = false; private int totalFiles = 0; private String globalResponse = ""; private Dropzone uploader; private JsFileUploaderOptions options; private MaterialUploadPreview uploadPreview = new MaterialUploadPreview(); public MaterialFileUploader() { super(Document.get().createDivElement(), AddinsCssName.FILEUPLOADER); setId(AddinsCssName.ZDROP); add(uploadPreview); options = getDefaultOptions(); } public MaterialFileUploader(String url, FileMethod method) { this(); setUrl(url); setMethod(method); } public MaterialFileUploader(String url, FileMethod method, int maxFileSize, String acceptedFiles) { this(url, method); setMaxFiles(maxFileSize); setAcceptedFiles(acceptedFiles); } protected JsFileUploaderOptions getDefaultOptions() { JsFileUploaderOptions options = new JsFileUploaderOptions(); 
options.clickable = ""; options.autoQueue = true; options.maxFilesize = 20; options.maxFiles = 100; options.method = FileMethod.POST.getCssName(); options.withCredentials = false; options.acceptedFiles = ""; return options; } @Override protected void onLoad() { super.onLoad(); if (!isInitialize()) { initDropzone(); setInitialize(true); } } public void initDropzone() { if (getWidgetCount() > 1) { String previews = DOM.createUniqueId(); uploadPreview.getUploadCollection().setId(previews); if (options.clickable.isEmpty()) { String clickable = DOM.createUniqueId(); if (getWidget(1) instanceof MaterialUploadLabel) { MaterialUploadLabel label = (MaterialUploadLabel) getWidget(1); label.getIcon().setId(clickable); } else { getWidget(1).getElement().setId(clickable); } setClickable(clickable); } if (!isPreview()) { uploadPreview.setDisplay(Display.NONE); } initDropzone(getElement(), uploadPreview.getUploadCollection().getItem().getElement(), previews, uploadPreview.getElement(), uploadPreview.getUploadHeader().getUploadedFiles().getElement()); }else { GWT.log("You don't have any child widget to use as a upload label"); } } /** * Intialize the dropzone component with element and form url to provide a * dnd feature for the file upload * * @param e */ protected void initDropzone(Element e, Element template, String previews, Element uploadPreview, Element uploadedFiles ) { JQueryElement previewNode = $(template); previewNode.asElement().setId(""); String previewTemplate = previewNode.parent().html(); options.previewTemplate = previewTemplate; options.previewsContainer = "#" + previews; uploader = new Dropzone(e, options); uploader.on("drop", event -> { fireDropEvent(); if (preview) { $(e).removeClass(CssName.ACTIVE); } return true; }); uploader.on("dragstart", event -> { DragStartEvent.fire(this); return true; }); uploader.on("dragend", event -> { DragEndEvent.fire(this); return true; }); uploader.on("dragenter", event -> { DragEnterEvent.fire(this, null); if (preview) { 
$(e).addClass(CssName.ACTIVE); } return true; }); uploader.on("dragover", event -> { DragOverEvent.fire(this); return true; }); uploader.on("dragleave", event -> { DragLeaveEvent.fire(this, null); if (preview) { $(e).removeClass(CssName.ACTIVE); } return true; }); uploader.on("addedfile", file -> { AddedFileEvent.fire(this, convertUploadFile(file)); totalFiles++; if (isPreview()) { $(uploadPreview).css("visibility", "visible"); $(uploadedFiles).html("Uploaded files " + totalFiles); getUploadPreview().getUploadHeader().getProgress().setPercent(0); } }); uploader.on("removedfile", file -> { RemovedFileEvent.fire(this, convertUploadFile(file)); totalFiles -= 1; $(uploadedFiles).html("Uploaded files " + totalFiles); }); uploader.on("error", (file, response) -> { String code = "200"; if (file.xhr != null) { code = file.xhr.status; } if (response.indexOf("401") >= 0) { response = "Unautharized. Probably Your's session expired. Log in and try again."; globalResponse = response; UnauthorizedEvent.fire(this, convertUploadFile(file), new UploadResponse(file.xhr.status, file.xhr.statusText, response)); } if (response.indexOf("404") >= 0) { response = "There's a problem uploading your file."; globalResponse = response; } if (response.indexOf("500") >= 0) { response = "There's a problem uploading your file."; globalResponse = response; } $(file.previewElement).find("#error-message").html(response); ErrorEvent.fire(this, convertUploadFile(file), new UploadResponse(file.xhr.status, file.xhr.statusText, response)); }); uploader.on("totaluploadprogress", (progress, file, response) -> { TotalUploadProgressEvent.fire(this, progress); if (isPreview()) { getUploadPreview().getUploadHeader().getProgress().setPercent(progress); } }); uploader.on("uploadprogress", (progress, file, response) -> { CurrentUploadProgressEvent.fire(this, progress); if ($this != null) { $this.find(".progress .determinate").css("width", progress + "%"); } }); uploader.on("sending", file -> { 
SendingEvent.fire(this, convertUploadFile(file), new UploadResponse(file.xhr.status, file.xhr.statusText)); }); uploader.on("success", (file, response) -> { globalResponse = response; SuccessEvent.fire(this, convertUploadFile(file), new UploadResponse(file.xhr.status, file.xhr.statusText, response)); }); uploader.on("complete", file -> { CompleteEvent.fire(this, convertUploadFile(file), new UploadResponse(file.xhr.status, file.xhr.statusText, globalResponse)); }); uploader.on("canceled", file -> { CanceledEvent.fire(this, convertUploadFile(file)); }); uploader.on("maxfilesreached", file -> { MaxFilesReachedEvent.fire(this, convertUploadFile(file)); }); uploader.on("maxfilesexceeded", file -> { MaterialToast.fireToast("You have reached the maximum files to be uploaded."); MaxFilesExceededEvent.fire(this, convertUploadFile(file)); }); } /** * Converts a Native File Object to Upload File object */ protected UploadFile convertUploadFile(File file) { Date lastModifiedDate = new Date(); // Avoid parsing error on last modified date if (file.lastModifiedDate != null && !file.lastModifiedDate.isEmpty()) { lastModifiedDate = new Date(file.lastModifiedDate); } return new UploadFile(file.name, lastModifiedDate, Double.parseDouble(file.size), file.type); } /** * Get the form url. */ public String getUrl() { return options.url; } /** * Set the form url e.g /file/post. */ public void setUrl(String url) { options.url = url; } /** * Get the maximum file size value of the uploader. */ public int getMaxFileSize() { return options.maxFilesize; } /** * Set the maximum file size of the uploader, default 20(MB). */ public void setMaxFileSize(int maxFileSize) { options.maxFilesize = maxFileSize; } /** * Check whether it's auto queue or not. */ public boolean isAutoQueue() { return options.autoQueue; } /** * Set the auto queue boolean value. */ public void setAutoQueue(boolean autoQueue) { options.autoQueue = autoQueue; } /** * Get the method param of file uploader. 
*/ public FileMethod getMethod() { return FileMethod.fromStyleName(options.method); } /** * Set the method param of file upload (POST or PUT), default POST. */ public void setMethod(FileMethod method) { options.method = method.getCssName(); } /** * Get the max number of files. */ public int getMaxFiles() { return options.maxFiles; } /** * Set the max number of files. * Default 100 but if you want to accept only one file just set the max file to 1. * If the number of files you upload exceeds, the event maxfilesexceeded will be called. */ public void setMaxFiles(int maxFiles) { options.maxFiles = maxFiles; } /** * Check whether it's withCredentials or not. */ public boolean isWithCredentials() { return options.withCredentials; } /** * Set the withCredentials boolean value. */ public void setWithCredentials(boolean withCredentials) { options.withCredentials = withCredentials; } /** * Get the accepted file string. */ public String getAcceptedFiles() { return options.acceptedFiles; } /** * Set the default implementation of accept checks the file's mime type or extension against this list. * This is a comma separated list of mime types or file extensions. Eg.: image/*,application/pdf,.psd. 
*/ public void setAcceptedFiles(String acceptedFiles) { options.acceptedFiles = acceptedFiles; } public void fireDropEvent() { DropEvent.fire(this, null); } @Override public HandlerRegistration addAddedFileHandler(final AddedFileEvent.AddedFileHandler<UploadFile> handler) { return addHandler(new AddedFileEvent.AddedFileHandler<UploadFile>() { @Override public void onAddedFile(AddedFileEvent<UploadFile> event) { if (isEnabled()) { handler.onAddedFile(event); } } }, AddedFileEvent.getType()); } @Override public HandlerRegistration addRemovedFileHandler(final RemovedFileEvent.RemovedFileHandler<UploadFile> handler) { return addHandler(new RemovedFileEvent.RemovedFileHandler<UploadFile>() { @Override public void onRemovedFile(RemovedFileEvent<UploadFile> event) { if (isEnabled()) { handler.onRemovedFile(event); } } }, RemovedFileEvent.getType()); } @Override public HandlerRegistration addErrorHandler(final ErrorEvent.ErrorHandler<UploadFile> handler) { return addHandler(new ErrorEvent.ErrorHandler<UploadFile>() { @Override public void onError(ErrorEvent<UploadFile> event) { if (isEnabled()) { handler.onError(event); } } }, ErrorEvent.getType()); } @Override public HandlerRegistration addUnauthorizedHandler(final UnauthorizedEvent.UnauthorizedHandler<UploadFile> handler) { return addHandler(new UnauthorizedEvent.UnauthorizedHandler<UploadFile>() { @Override public void onUnauthorized(UnauthorizedEvent<UploadFile> event) { if (isEnabled()) { handler.onUnauthorized(event); } } }, UnauthorizedEvent.getType()); } @Override public HandlerRegistration addTotalUploadProgressHandler(final TotalUploadProgressEvent.TotalUploadProgressHandler handler) { return addHandler(event -> { if (isEnabled()) { handler.onTotalUploadProgress(event); } }, TotalUploadProgressEvent.TYPE); } @Override public HandlerRegistration addCurrentUploadProgressHandler(CurrentUploadProgressEvent.CurrentUploadProgressHandler handler) { return addHandler(event -> { if (isEnabled()) { 
handler.onCurrentUploadProgress(event); } }, CurrentUploadProgressEvent.TYPE); } @Override public HandlerRegistration addSendingHandler(final SendingEvent.SendingHandler<UploadFile> handler) { return addHandler(new SendingEvent.SendingHandler<UploadFile>() { @Override public void onSending(SendingEvent<UploadFile> event) { if (isEnabled()) { handler.onSending(event); } } }, SendingEvent.getType()); } @Override public HandlerRegistration addSuccessHandler(final SuccessEvent.SuccessHandler<UploadFile> handler) { return addHandler(new SuccessEvent.SuccessHandler<UploadFile>() { @Override public void onSuccess(SuccessEvent<UploadFile> event) { if (isEnabled()) { handler.onSuccess(event); } } }, SuccessEvent.getType()); } @Override public HandlerRegistration addCompleteHandler(final CompleteEvent.CompleteHandler<UploadFile> handler) { return addHandler(new CompleteEvent.CompleteHandler<UploadFile>() { @Override public void onComplete(CompleteEvent<UploadFile> event) { if (isEnabled()) { handler.onComplete(event); } } }, CompleteEvent.getType()); } @Override public HandlerRegistration addCancelHandler(final CanceledEvent.CanceledHandler<UploadFile> handler) { return addHandler(new CanceledEvent.CanceledHandler<UploadFile>() { @Override public void onCanceled(CanceledEvent<UploadFile> event) { if (isEnabled()) { handler.onCanceled(event); } } }, CanceledEvent.getType()); } @Override public HandlerRegistration addMaxFilesReachHandler(final MaxFilesReachedEvent.MaxFilesReachedHandler<UploadFile> handler) { return addHandler(new MaxFilesReachedEvent.MaxFilesReachedHandler<UploadFile>() { @Override public void onMaxFilesReached(MaxFilesReachedEvent<UploadFile> event) { if (isEnabled()) { handler.onMaxFilesReached(event); } } }, MaxFilesReachedEvent.getType()); } @Override public HandlerRegistration addMaxFilesExceededHandler(final MaxFilesExceededEvent.MaxFilesExceededHandler<UploadFile> handler) { return addHandler(new 
MaxFilesExceededEvent.MaxFilesExceededHandler<UploadFile>() { @Override public void onMaxFilesExceeded(MaxFilesExceededEvent<UploadFile> event) { if (isEnabled()) { handler.onMaxFilesExceeded(event); } } }, MaxFilesExceededEvent.getType()); } public String getClickable() { return options.clickable.length()==0?options.clickable:options.clickable.substring(1); } public void setClickable(String clickable) { options.clickable = "#"+clickable; } public boolean isPreview() { return preview; } public void setPreview(boolean preview) { this.preview = preview; } /** * Check whether the component has been initialized. */ public boolean isInitialize() { return initialize; } /** * Set the initialization of the component. */ public void setInitialize(boolean initialize) { this.initialize = initialize; } public void reset() { uploader.removeAllFiles(); } public MaterialUploadPreview getUploadPreview() { return uploadPreview; } }
src/main/java/gwt/material/design/addins/client/fileuploader/MaterialFileUploader.java
/* * #%L * GwtMaterial * %% * Copyright (C) 2015 - 2016 GwtMaterialDesign * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package gwt.material.design.addins.client.fileuploader; import com.google.gwt.dom.client.Document; import com.google.gwt.dom.client.Element; import com.google.gwt.event.shared.HandlerRegistration; import com.google.gwt.user.client.DOM; import gwt.material.design.addins.client.MaterialAddins; import gwt.material.design.addins.client.base.constants.AddinsCssName; import gwt.material.design.addins.client.fileuploader.base.HasFileUpload; import gwt.material.design.addins.client.fileuploader.base.UploadFile; import gwt.material.design.addins.client.fileuploader.base.UploadResponse; import gwt.material.design.addins.client.fileuploader.constants.FileMethod; import gwt.material.design.addins.client.fileuploader.events.*; import gwt.material.design.addins.client.fileuploader.js.Dropzone; import gwt.material.design.addins.client.fileuploader.js.File; import gwt.material.design.addins.client.fileuploader.js.JsFileUploaderOptions; import gwt.material.design.client.MaterialDesignBase; import gwt.material.design.client.base.MaterialWidget; import gwt.material.design.client.constants.CssName; import gwt.material.design.client.constants.Display; import gwt.material.design.client.events.*; import gwt.material.design.client.ui.MaterialToast; import gwt.material.design.jquery.client.api.JQueryElement; import java.util.Date; import static 
gwt.material.design.jquery.client.api.JQuery.$; //@formatter:off /** * Custom file uploader with Dnd support with the help of dropzone.js. It has multiple * feature just like the GWT File Uploader core widget. * <p> * <h3>XML Namespace Declaration</h3> * <pre> * {@code * xmlns:ma='urn:import:gwt.material.design.addins.client' * } * </pre> * <p> * <h3>UiBinder Usage:</h3> * <pre> * {@code * <ma:fileuploader.MaterialFileUploader url="/file/upload"/> * } * </pre> * * @author kevzlou7979 * @see <a href="http://gwtmaterialdesign.github.io/gwt-material-demo/#fileuploader">File Uploader</a> */ //@formatter:on public class MaterialFileUploader extends MaterialWidget implements HasFileUpload<UploadFile> { static { if (MaterialAddins.isDebug()) { MaterialDesignBase.injectDebugJs(MaterialFileUploaderDebugClientBundle.INSTANCE.dropzoneJsDebug()); MaterialDesignBase.injectCss(MaterialFileUploaderDebugClientBundle.INSTANCE.dropzoneCssDebug()); } else { MaterialDesignBase.injectJs(MaterialFileUploaderClientBundle.INSTANCE.dropzoneJs()); MaterialDesignBase.injectCss(MaterialFileUploaderClientBundle.INSTANCE.dropzoneCss()); } } private boolean preview = true; private boolean initialize = false; private int totalFiles = 0; private String globalResponse = ""; private Dropzone uploader; private JsFileUploaderOptions options; private MaterialUploadPreview uploadPreview = new MaterialUploadPreview(); public MaterialFileUploader() { super(Document.get().createDivElement(), AddinsCssName.FILEUPLOADER); setId(AddinsCssName.ZDROP); add(uploadPreview); options = getDefaultOptions(); } public MaterialFileUploader(String url, FileMethod method) { this(); setUrl(url); setMethod(method); } public MaterialFileUploader(String url, FileMethod method, int maxFileSize, String acceptedFiles) { this(url, method); setMaxFiles(maxFileSize); setAcceptedFiles(acceptedFiles); } protected JsFileUploaderOptions getDefaultOptions() { JsFileUploaderOptions options = new JsFileUploaderOptions(); 
options.clickable = ""; options.autoQueue = true; options.maxFilesize = 20; options.maxFiles = 100; options.method = FileMethod.POST.getCssName(); options.withCredentials = false; options.acceptedFiles = ""; return options; } @Override protected void onLoad() { super.onLoad(); if (!isInitialize()) { initDropzone(); setInitialize(true); } } public void initDropzone() { String previews = DOM.createUniqueId(); uploadPreview.getUploadCollection().setId(previews); if (options.clickable.isEmpty()) { String clickable = DOM.createUniqueId(); if (getWidget(1) instanceof MaterialUploadLabel) { MaterialUploadLabel label = (MaterialUploadLabel) getWidget(1); label.getIcon().setId(clickable); } else { getWidget(1).getElement().setId(clickable); } setClickable(clickable); } if (!isPreview()) { uploadPreview.setDisplay(Display.NONE); } initDropzone(getElement(), uploadPreview.getUploadCollection().getItem().getElement(), previews, uploadPreview.getElement(), uploadPreview.getUploadHeader().getUploadedFiles().getElement()); } /** * Intialize the dropzone component with element and form url to provide a * dnd feature for the file upload * * @param e */ protected void initDropzone(Element e, Element template, String previews, Element uploadPreview, Element uploadedFiles ) { JQueryElement previewNode = $(template); previewNode.asElement().setId(""); String previewTemplate = previewNode.parent().html(); options.previewTemplate = previewTemplate; options.previewsContainer = "#" + previews; uploader = new Dropzone(e, options); uploader.on("drop", event -> { fireDropEvent(); if (preview) { $(e).removeClass(CssName.ACTIVE); } return true; }); uploader.on("dragstart", event -> { DragStartEvent.fire(this); return true; }); uploader.on("dragend", event -> { DragEndEvent.fire(this); return true; }); uploader.on("dragenter", event -> { DragEnterEvent.fire(this, null); if (preview) { $(e).addClass(CssName.ACTIVE); } return true; }); uploader.on("dragover", event -> { DragOverEvent.fire(this); 
return true; }); uploader.on("dragleave", event -> { DragLeaveEvent.fire(this, null); if (preview) { $(e).removeClass(CssName.ACTIVE); } return true; }); uploader.on("addedfile", file -> { AddedFileEvent.fire(this, convertUploadFile(file)); totalFiles++; if (isPreview()) { $(uploadPreview).css("visibility", "visible"); $(uploadedFiles).html("Uploaded files " + totalFiles); getUploadPreview().getUploadHeader().getProgress().setPercent(0); } }); uploader.on("removedfile", file -> { RemovedFileEvent.fire(this, convertUploadFile(file)); totalFiles -= 1; $(uploadedFiles).html("Uploaded files " + totalFiles); }); uploader.on("error", (file, response) -> { String code = "200"; if (file.xhr != null) { code = file.xhr.status; } if (response.indexOf("401") >= 0) { response = "Unautharized. Probably Your's session expired. Log in and try again."; globalResponse = response; UnauthorizedEvent.fire(this, convertUploadFile(file), new UploadResponse(file.xhr.status, file.xhr.statusText, response)); } if (response.indexOf("404") >= 0) { response = "There's a problem uploading your file."; globalResponse = response; } if (response.indexOf("500") >= 0) { response = "There's a problem uploading your file."; globalResponse = response; } $(file.previewElement).find("#error-message").html(response); ErrorEvent.fire(this, convertUploadFile(file), new UploadResponse(file.xhr.status, file.xhr.statusText, response)); }); uploader.on("totaluploadprogress", (progress, file, response) -> { TotalUploadProgressEvent.fire(this, progress); if (isPreview()) { getUploadPreview().getUploadHeader().getProgress().setPercent(progress); } }); uploader.on("uploadprogress", (progress, file, response) -> { CurrentUploadProgressEvent.fire(this, progress); if ($this != null) { $this.find(".progress .determinate").css("width", progress + "%"); } }); uploader.on("sending", file -> { SendingEvent.fire(this, convertUploadFile(file), new UploadResponse(file.xhr.status, file.xhr.statusText)); }); 
uploader.on("success", (file, response) -> { globalResponse = response; SuccessEvent.fire(this, convertUploadFile(file), new UploadResponse(file.xhr.status, file.xhr.statusText, response)); }); uploader.on("complete", file -> { CompleteEvent.fire(this, convertUploadFile(file), new UploadResponse(file.xhr.status, file.xhr.statusText, globalResponse)); }); uploader.on("canceled", file -> { CanceledEvent.fire(this, convertUploadFile(file)); }); uploader.on("maxfilesreached", file -> { MaxFilesReachedEvent.fire(this, convertUploadFile(file)); }); uploader.on("maxfilesexceeded", file -> { MaterialToast.fireToast("You have reached the maximum files to be uploaded."); MaxFilesExceededEvent.fire(this, convertUploadFile(file)); }); } /** * Converts a Native File Object to Upload File object */ protected UploadFile convertUploadFile(File file) { Date lastModifiedDate = new Date(); // Avoid parsing error on last modified date if (file.lastModifiedDate != null && !file.lastModifiedDate.isEmpty()) { lastModifiedDate = new Date(file.lastModifiedDate); } return new UploadFile(file.name, lastModifiedDate, Double.parseDouble(file.size), file.type); } /** * Get the form url. */ public String getUrl() { return options.url; } /** * Set the form url e.g /file/post. */ public void setUrl(String url) { options.url = url; } /** * Get the maximum file size value of the uploader. */ public int getMaxFileSize() { return options.maxFilesize; } /** * Set the maximum file size of the uploader, default 20(MB). */ public void setMaxFileSize(int maxFileSize) { options.maxFilesize = maxFileSize; } /** * Check whether it's auto queue or not. */ public boolean isAutoQueue() { return options.autoQueue; } /** * Set the auto queue boolean value. */ public void setAutoQueue(boolean autoQueue) { options.autoQueue = autoQueue; } /** * Get the method param of file uploader. 
*/ public FileMethod getMethod() { return FileMethod.fromStyleName(options.method); } /** * Set the method param of file upload (POST or PUT), default POST. */ public void setMethod(FileMethod method) { options.method = method.getCssName(); } /** * Get the max number of files. */ public int getMaxFiles() { return options.maxFiles; } /** * Set the max number of files. * Default 100 but if you want to accept only one file just set the max file to 1. * If the number of files you upload exceeds, the event maxfilesexceeded will be called. */ public void setMaxFiles(int maxFiles) { options.maxFiles = maxFiles; } /** * Check whether it's withCredentials or not. */ public boolean isWithCredentials() { return options.withCredentials; } /** * Set the withCredentials boolean value. */ public void setWithCredentials(boolean withCredentials) { options.withCredentials = withCredentials; } /** * Get the accepted file string. */ public String getAcceptedFiles() { return options.acceptedFiles; } /** * Set the default implementation of accept checks the file's mime type or extension against this list. * This is a comma separated list of mime types or file extensions. Eg.: image/*,application/pdf,.psd. 
*/ public void setAcceptedFiles(String acceptedFiles) { options.acceptedFiles = acceptedFiles; } public void fireDropEvent() { DropEvent.fire(this, null); } @Override public HandlerRegistration addAddedFileHandler(final AddedFileEvent.AddedFileHandler<UploadFile> handler) { return addHandler(new AddedFileEvent.AddedFileHandler<UploadFile>() { @Override public void onAddedFile(AddedFileEvent<UploadFile> event) { if (isEnabled()) { handler.onAddedFile(event); } } }, AddedFileEvent.getType()); } @Override public HandlerRegistration addRemovedFileHandler(final RemovedFileEvent.RemovedFileHandler<UploadFile> handler) { return addHandler(new RemovedFileEvent.RemovedFileHandler<UploadFile>() { @Override public void onRemovedFile(RemovedFileEvent<UploadFile> event) { if (isEnabled()) { handler.onRemovedFile(event); } } }, RemovedFileEvent.getType()); } @Override public HandlerRegistration addErrorHandler(final ErrorEvent.ErrorHandler<UploadFile> handler) { return addHandler(new ErrorEvent.ErrorHandler<UploadFile>() { @Override public void onError(ErrorEvent<UploadFile> event) { if (isEnabled()) { handler.onError(event); } } }, ErrorEvent.getType()); } @Override public HandlerRegistration addUnauthorizedHandler(final UnauthorizedEvent.UnauthorizedHandler<UploadFile> handler) { return addHandler(new UnauthorizedEvent.UnauthorizedHandler<UploadFile>() { @Override public void onUnauthorized(UnauthorizedEvent<UploadFile> event) { if (isEnabled()) { handler.onUnauthorized(event); } } }, UnauthorizedEvent.getType()); } @Override public HandlerRegistration addTotalUploadProgressHandler(final TotalUploadProgressEvent.TotalUploadProgressHandler handler) { return addHandler(event -> { if (isEnabled()) { handler.onTotalUploadProgress(event); } }, TotalUploadProgressEvent.TYPE); } @Override public HandlerRegistration addCurrentUploadProgressHandler(CurrentUploadProgressEvent.CurrentUploadProgressHandler handler) { return addHandler(event -> { if (isEnabled()) { 
handler.onCurrentUploadProgress(event); } }, CurrentUploadProgressEvent.TYPE); } @Override public HandlerRegistration addSendingHandler(final SendingEvent.SendingHandler<UploadFile> handler) { return addHandler(new SendingEvent.SendingHandler<UploadFile>() { @Override public void onSending(SendingEvent<UploadFile> event) { if (isEnabled()) { handler.onSending(event); } } }, SendingEvent.getType()); } @Override public HandlerRegistration addSuccessHandler(final SuccessEvent.SuccessHandler<UploadFile> handler) { return addHandler(new SuccessEvent.SuccessHandler<UploadFile>() { @Override public void onSuccess(SuccessEvent<UploadFile> event) { if (isEnabled()) { handler.onSuccess(event); } } }, SuccessEvent.getType()); } @Override public HandlerRegistration addCompleteHandler(final CompleteEvent.CompleteHandler<UploadFile> handler) { return addHandler(new CompleteEvent.CompleteHandler<UploadFile>() { @Override public void onComplete(CompleteEvent<UploadFile> event) { if (isEnabled()) { handler.onComplete(event); } } }, CompleteEvent.getType()); } @Override public HandlerRegistration addCancelHandler(final CanceledEvent.CanceledHandler<UploadFile> handler) { return addHandler(new CanceledEvent.CanceledHandler<UploadFile>() { @Override public void onCanceled(CanceledEvent<UploadFile> event) { if (isEnabled()) { handler.onCanceled(event); } } }, CanceledEvent.getType()); } @Override public HandlerRegistration addMaxFilesReachHandler(final MaxFilesReachedEvent.MaxFilesReachedHandler<UploadFile> handler) { return addHandler(new MaxFilesReachedEvent.MaxFilesReachedHandler<UploadFile>() { @Override public void onMaxFilesReached(MaxFilesReachedEvent<UploadFile> event) { if (isEnabled()) { handler.onMaxFilesReached(event); } } }, MaxFilesReachedEvent.getType()); } @Override public HandlerRegistration addMaxFilesExceededHandler(final MaxFilesExceededEvent.MaxFilesExceededHandler<UploadFile> handler) { return addHandler(new 
MaxFilesExceededEvent.MaxFilesExceededHandler<UploadFile>() { @Override public void onMaxFilesExceeded(MaxFilesExceededEvent<UploadFile> event) { if (isEnabled()) { handler.onMaxFilesExceeded(event); } } }, MaxFilesExceededEvent.getType()); } public String getClickable() { return options.clickable.length()==0?options.clickable:options.clickable.substring(1); } public void setClickable(String clickable) { options.clickable = "#"+clickable; } public boolean isPreview() { return preview; } public void setPreview(boolean preview) { this.preview = preview; } /** * Check whether the component has been initialized. */ public boolean isInitialize() { return initialize; } /** * Set the initialization of the component. */ public void setInitialize(boolean initialize) { this.initialize = initialize; } public void reset() { uploader.removeAllFiles(); } public MaterialUploadPreview getUploadPreview() { return uploadPreview; } }
MaterialFileUploader - Added check to avoid IndexOutOfBounds
src/main/java/gwt/material/design/addins/client/fileuploader/MaterialFileUploader.java
MaterialFileUploader - Added check to avoid IndexOutOfBounds
<ide><path>rc/main/java/gwt/material/design/addins/client/fileuploader/MaterialFileUploader.java <ide> */ <ide> package gwt.material.design.addins.client.fileuploader; <ide> <add>import com.google.gwt.core.client.GWT; <ide> import com.google.gwt.dom.client.Document; <ide> import com.google.gwt.dom.client.Element; <ide> import com.google.gwt.event.shared.HandlerRegistration; <ide> } <ide> <ide> public void initDropzone() { <del> String previews = DOM.createUniqueId(); <del> uploadPreview.getUploadCollection().setId(previews); <del> if (options.clickable.isEmpty()) { <del> String clickable = DOM.createUniqueId(); <del> if (getWidget(1) instanceof MaterialUploadLabel) { <del> MaterialUploadLabel label = (MaterialUploadLabel) getWidget(1); <del> label.getIcon().setId(clickable); <del> } else { <del> getWidget(1).getElement().setId(clickable); <del> } <del> setClickable(clickable); <add> if (getWidgetCount() > 1) { <add> String previews = DOM.createUniqueId(); <add> uploadPreview.getUploadCollection().setId(previews); <add> if (options.clickable.isEmpty()) { <add> String clickable = DOM.createUniqueId(); <add> <add> if (getWidget(1) instanceof MaterialUploadLabel) { <add> MaterialUploadLabel label = (MaterialUploadLabel) getWidget(1); <add> label.getIcon().setId(clickable); <add> } else { <add> getWidget(1).getElement().setId(clickable); <add> } <add> setClickable(clickable); <add> } <add> <add> if (!isPreview()) { <add> uploadPreview.setDisplay(Display.NONE); <add> } <add> <add> initDropzone(getElement(), <add> uploadPreview.getUploadCollection().getItem().getElement(), <add> previews, <add> uploadPreview.getElement(), <add> uploadPreview.getUploadHeader().getUploadedFiles().getElement()); <add> }else { <add> GWT.log("You don't have any child widget to use as a upload label"); <ide> } <del> <del> if (!isPreview()) { <del> uploadPreview.setDisplay(Display.NONE); <del> } <del> <del> initDropzone(getElement(), <del> 
uploadPreview.getUploadCollection().getItem().getElement(), <del> previews, <del> uploadPreview.getElement(), <del> uploadPreview.getUploadHeader().getUploadedFiles().getElement()); <ide> } <ide> <ide> /**
Java
apache-2.0
e563b471b1ffaf5b2109d824a739c265a9cea8a0
0
stormleoxia/teamcity-nuget-support,stormleoxia/teamcity-nuget-support,JetBrains/teamcity-nuget-support,JetBrains/teamcity-nuget-support,stormleoxia/teamcity-nuget-support,JetBrains/teamcity-nuget-support,JetBrains/teamcity-nuget-support
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jetbrains.buildServer.nuget.tests.integration.feed.server; import com.intellij.execution.configurations.GeneralCommandLine; import jetbrains.buildServer.ExecResult; import jetbrains.buildServer.SimpleCommandLineProcessRunner; import jetbrains.buildServer.nuget.tests.integration.NuGet; import jetbrains.buildServer.nuget.tests.integration.Paths; import org.jetbrains.annotations.NotNull; import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; /** * Created by Eugene Petrenko ([email protected]) * Date: 05.01.12 0:40 */ public class NuGetJavaFeedIntegrationTest extends NuGetJavaFeedIntegrationTestBase { private final String packageId_1 = "CommonServiceLocator"; private final String packageId_2 = "NuGet.Core"; @BeforeMethod @Override protected void setUp() throws Exception { super.setUp(); addPackage(Paths.getTestDataPath("/packages/" + packageId_1 + ".1.0.nupkg")); addPackage(Paths.getTestDataPath("/packages/" + packageId_2 + ".1.5.20902.9026.nupkg")); } @Test(dataProvider = NUGET_VERSIONS) public void testNuGetClientReadsFeed(@NotNull final NuGet nuget) throws Exception{ enableDebug(); GeneralCommandLine cmd = new GeneralCommandLine(); cmd.setExePath(nuget.getPath().getPath()); cmd.addParameter("list"); cmd.addParameter("-Source"); cmd.addParameter(getNuGetServerUrl()); final ExecResult exec = 
SimpleCommandLineProcessRunner.runCommand(cmd, null); final String stdout = exec.getStdout(); System.out.println(stdout); System.out.println(exec.getStderr()); Assert.assertEquals(exec.getExitCode(), 0); Assert.assertTrue(stdout.contains(packageId_1), stdout); Assert.assertTrue(stdout.contains(packageId_2), stdout); } @Test(dataProvider = NUGET_VERSIONS) public void testNuGetClientReadsFeedQuery(@NotNull final NuGet nuget) throws Exception{ enableDebug(); GeneralCommandLine cmd = new GeneralCommandLine(); cmd.setExePath(nuget.getPath().getPath()); cmd.addParameter("list"); cmd.addParameter("Common"); cmd.addParameter("-Source"); cmd.addParameter(getNuGetServerUrl()); final ExecResult exec = SimpleCommandLineProcessRunner.runCommand(cmd, null); Assert.assertEquals(exec.getExitCode(), 0); final String stdout = exec.getStdout(); System.out.println(stdout); Assert.assertTrue(stdout.contains(packageId_1), stdout); Assert.assertTrue(stdout.contains(packageId_2), stdout); } }
nuget-tests/src/jetbrains/buildServer/nuget/tests/integration/feed/server/NuGetJavaFeedIntegrationTest.java
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jetbrains.buildServer.nuget.tests.integration.feed.server; import com.intellij.execution.configurations.GeneralCommandLine; import jetbrains.buildServer.ExecResult; import jetbrains.buildServer.SimpleCommandLineProcessRunner; import jetbrains.buildServer.nuget.tests.integration.NuGet; import jetbrains.buildServer.nuget.tests.integration.Paths; import org.jetbrains.annotations.NotNull; import org.testng.Assert; import org.testng.annotations.Test; /** * Created by Eugene Petrenko ([email protected]) * Date: 05.01.12 0:40 */ public class NuGetJavaFeedIntegrationTest extends NuGetJavaFeedIntegrationTestBase { @Test(dataProvider = NUGET_VERSIONS) public void testNuGetClientReadsFeed(@NotNull final NuGet nuget) throws Exception{ enableDebug(); final String packageId_1 = "CommonServiceLocator"; final String packageId_2 = "NuGet.Core"; addPackage(Paths.getTestDataPath("/packages/" + packageId_1 + ".1.0.nupkg")); addPackage(Paths.getTestDataPath("/packages/" + packageId_2 + ".1.5.20902.9026.nupkg")); GeneralCommandLine cmd = new GeneralCommandLine(); cmd.setExePath(nuget.getPath().getPath()); cmd.addParameter("list"); cmd.addParameter("-Source"); cmd.addParameter(getNuGetServerUrl()); final ExecResult exec = SimpleCommandLineProcessRunner.runCommand(cmd, null); final String stdout = exec.getStdout(); System.out.println(stdout); System.out.println(exec.getStderr()); 
Assert.assertEquals(exec.getExitCode(), 0); Assert.assertTrue(stdout.contains(packageId_1), stdout); Assert.assertTrue(stdout.contains(packageId_2), stdout); } @Test(dataProvider = NUGET_VERSIONS) public void testNuGetClientReadsFeedQuery(@NotNull final NuGet nuget) throws Exception{ enableDebug(); final String packageId_1 = "CommonServiceLocator"; final String packageId_2 = "NuGet.Core"; addPackage(Paths.getTestDataPath("/packages/" + packageId_1 + ".1.0.nupkg")); addPackage(Paths.getTestDataPath("/packages/" + packageId_2 + ".1.5.20902.9026.nupkg")); GeneralCommandLine cmd = new GeneralCommandLine(); cmd.setExePath(nuget.getPath().getPath()); cmd.addParameter("list"); cmd.addParameter("Common"); cmd.addParameter("-Source"); cmd.addParameter(getNuGetServerUrl()); final ExecResult exec = SimpleCommandLineProcessRunner.runCommand(cmd, null); Assert.assertEquals(exec.getExitCode(), 0); final String stdout = exec.getStdout(); System.out.println(stdout); Assert.assertTrue(stdout.contains(packageId_1), stdout); Assert.assertTrue(stdout.contains(packageId_2), stdout); } }
minor... extract code
nuget-tests/src/jetbrains/buildServer/nuget/tests/integration/feed/server/NuGetJavaFeedIntegrationTest.java
minor... extract code
<ide><path>uget-tests/src/jetbrains/buildServer/nuget/tests/integration/feed/server/NuGetJavaFeedIntegrationTest.java <ide> import jetbrains.buildServer.nuget.tests.integration.Paths; <ide> import org.jetbrains.annotations.NotNull; <ide> import org.testng.Assert; <add>import org.testng.annotations.BeforeMethod; <ide> import org.testng.annotations.Test; <ide> <ide> /** <ide> * Date: 05.01.12 0:40 <ide> */ <ide> public class NuGetJavaFeedIntegrationTest extends NuGetJavaFeedIntegrationTestBase { <add> private final String packageId_1 = "CommonServiceLocator"; <add> private final String packageId_2 = "NuGet.Core"; <add> <add> @BeforeMethod <add> @Override <add> protected void setUp() throws Exception { <add> super.setUp(); <add> <add> addPackage(Paths.getTestDataPath("/packages/" + packageId_1 + ".1.0.nupkg")); <add> addPackage(Paths.getTestDataPath("/packages/" + packageId_2 + ".1.5.20902.9026.nupkg")); <add> } <ide> <ide> @Test(dataProvider = NUGET_VERSIONS) <ide> public void testNuGetClientReadsFeed(@NotNull final NuGet nuget) throws Exception{ <ide> enableDebug(); <del> <del> final String packageId_1 = "CommonServiceLocator"; <del> final String packageId_2 = "NuGet.Core"; <del> <del> addPackage(Paths.getTestDataPath("/packages/" + packageId_1 + ".1.0.nupkg")); <del> addPackage(Paths.getTestDataPath("/packages/" + packageId_2 + ".1.5.20902.9026.nupkg")); <ide> <ide> GeneralCommandLine cmd = new GeneralCommandLine(); <ide> cmd.setExePath(nuget.getPath().getPath()); <ide> public void testNuGetClientReadsFeedQuery(@NotNull final NuGet nuget) throws Exception{ <ide> enableDebug(); <ide> <del> final String packageId_1 = "CommonServiceLocator"; <del> final String packageId_2 = "NuGet.Core"; <del> <del> addPackage(Paths.getTestDataPath("/packages/" + packageId_1 + ".1.0.nupkg")); <del> addPackage(Paths.getTestDataPath("/packages/" + packageId_2 + ".1.5.20902.9026.nupkg")); <del> <ide> GeneralCommandLine cmd = new GeneralCommandLine(); <ide> 
cmd.setExePath(nuget.getPath().getPath()); <ide> cmd.addParameter("list");
Java
apache-2.0
fff1820e6276ef3a526fb91f9c6c4c3c1dfb848c
0
anomaly/closure-compiler,Yannic/closure-compiler,GerHobbelt/closure-compiler,tiobe/closure-compiler,google/closure-compiler,GerHobbelt/closure-compiler,mprobst/closure-compiler,Yannic/closure-compiler,tiobe/closure-compiler,shantanusharma/closure-compiler,ChadKillingsworth/closure-compiler,MatrixFrog/closure-compiler,mprobst/closure-compiler,tdelmas/closure-compiler,ChadKillingsworth/closure-compiler,anomaly/closure-compiler,nawawi/closure-compiler,mprobst/closure-compiler,vobruba-martin/closure-compiler,ChadKillingsworth/closure-compiler,ChadKillingsworth/closure-compiler,Yannic/closure-compiler,Yannic/closure-compiler,monetate/closure-compiler,anomaly/closure-compiler,google/closure-compiler,nawawi/closure-compiler,google/closure-compiler,shantanusharma/closure-compiler,vobruba-martin/closure-compiler,nawawi/closure-compiler,tdelmas/closure-compiler,tdelmas/closure-compiler,monetate/closure-compiler,vobruba-martin/closure-compiler,google/closure-compiler,MatrixFrog/closure-compiler,GerHobbelt/closure-compiler,GerHobbelt/closure-compiler,monetate/closure-compiler,vobruba-martin/closure-compiler,MatrixFrog/closure-compiler,MatrixFrog/closure-compiler,monetate/closure-compiler,tiobe/closure-compiler,shantanusharma/closure-compiler,nawawi/closure-compiler,shantanusharma/closure-compiler,tdelmas/closure-compiler,tiobe/closure-compiler,mprobst/closure-compiler,anomaly/closure-compiler
/* * Copyright 2004 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.common.annotations.GwtIncompatible; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Splitter; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.debugging.sourcemap.proto.Mapping.OriginalMapping; import com.google.javascript.jscomp.CompilerOptions.DevMode; import com.google.javascript.jscomp.CoverageInstrumentationPass.CoverageReach; import com.google.javascript.jscomp.CoverageInstrumentationPass.InstrumentOption; import com.google.javascript.jscomp.WarningsGuard.DiagnosticGroupState; import com.google.javascript.jscomp.deps.JsFileParser; import com.google.javascript.jscomp.deps.ModuleLoader; import com.google.javascript.jscomp.deps.SortedDependencies.MissingProvideException; import com.google.javascript.jscomp.parsing.Config; import com.google.javascript.jscomp.parsing.ParserRunner; import com.google.javascript.jscomp.parsing.parser.trees.Comment; import com.google.javascript.jscomp.type.ChainableReverseAbstractInterpreter; import 
com.google.javascript.jscomp.type.ClosureReverseAbstractInterpreter; import com.google.javascript.jscomp.type.ReverseAbstractInterpreter; import com.google.javascript.jscomp.type.SemanticReverseAbstractInterpreter; import com.google.javascript.rhino.ErrorReporter; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.InputId; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.JSDocInfoBuilder; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.TypeIRegistry; import com.google.javascript.rhino.jstype.JSTypeRegistry; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.PrintStream; import java.io.Serializable; import java.nio.file.FileSystems; import java.util.AbstractSet; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.ResourceBundle; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; /** * Compiler (and the other classes in this package) does the following: * <ul> * <li>parses JS code * <li>checks for undefined variables * <li>performs optimizations such as constant folding and constants inlining * <li>renames variables (to short names) * <li>outputs compact JavaScript code * </ul> * * External variables are declared in 'externs' files. For instance, the file * may include definitions for global javascript/browser objects such as * window, document. 
* */ public class Compiler extends AbstractCompiler implements ErrorHandler, SourceFileMapping { static final String SINGLETON_MODULE_NAME = "$singleton$"; static final DiagnosticType MODULE_DEPENDENCY_ERROR = DiagnosticType.error("JSC_MODULE_DEPENDENCY_ERROR", "Bad dependency: {0} -> {1}. " + "Modules must be listed in dependency order."); static final DiagnosticType MISSING_ENTRY_ERROR = DiagnosticType.error( "JSC_MISSING_ENTRY_ERROR", "required entry point \"{0}\" never provided"); static final DiagnosticType MISSING_MODULE_ERROR = DiagnosticType.error( "JSC_MISSING_ENTRY_ERROR", "unknown module \"{0}\" specified in entry point spec"); // Used in PerformanceTracker static final String READING_PASS_NAME = "readInputs"; static final String PARSING_PASS_NAME = "parseInputs"; static final String PEEPHOLE_PASS_NAME = "peepholeOptimizations"; static final String UNREACHABLE_CODE_ELIM_NAME = "removeUnreachableCode"; private static final String CONFIG_RESOURCE = "com.google.javascript.jscomp.parsing.ParserConfig"; CompilerOptions options = null; private PassConfig passes = null; // The externs inputs private List<CompilerInput> externs; // The JS source modules private List<JSModule> modules; private JSModuleGraph moduleGraph; // The module loader for resolving paths into module URIs. private ModuleLoader moduleLoader; // The JS source inputs private List<CompilerInput> inputs; // error manager to which error management is delegated private ErrorManager errorManager; // Warnings guard for filtering warnings. private WarningsGuard warningsGuard; // Compile-time injected libraries. The node points to the last node of // the library, so code can be inserted after. private final Map<String, Node> injectedLibraries = new LinkedHashMap<>(); // Node of the final injected library. Future libraries will be injected // after this node. 
private Node lastInjectedLibrary; // Parse tree root nodes Node externsRoot; Node jsRoot; Node externAndJsRoot; // Used for debugging; to see the compiled code between passes private String lastJsSource = null; /** @see #getLanguageMode() */ private CompilerOptions.LanguageMode languageMode = CompilerOptions.LanguageMode.ECMASCRIPT3; private final Map<InputId, CompilerInput> inputsById = new ConcurrentHashMap<>(); /** * Subclasses are responsible for loading soures that were not provided as explicit inputs to the * compiler. For example, looking up sources referenced within sourcemaps. */ public static class ExternalSourceLoader { public SourceFile loadSource(String filename) { throw new RuntimeException("Cannot load without a valid loader."); } } private ExternalSourceLoader originalSourcesLoader = new ExternalSourceLoader() { // TODO(tdeegan): The @GwtIncompatible tree needs to be cleaned up. @Override @GwtIncompatible("SourceFile.fromFile") public SourceFile loadSource(String filename) { return SourceFile.fromFile(filename); } }; // Original sources referenced by the source maps. private ConcurrentHashMap<String, SourceFile> sourceMapOriginalSources = new ConcurrentHashMap<>(); /** Configured {@link SourceMapInput}s, plus any source maps discovered in source files. */ private final ConcurrentHashMap<String, SourceMapInput> inputSourceMaps = new ConcurrentHashMap<>(); // Map from filenames to lists of all the comments in each file. private Map<String, List<Comment>> commentsPerFile = new ConcurrentHashMap<>(); /** The source code map */ private SourceMap sourceMap; /** The externs created from the exports. */ private String externExports = null; /** * Ids for function inlining so that each declared name remains * unique. */ private int uniqueNameId = 0; /** * Whether to assume there are references to the RegExp Global object * properties. 
*/ private boolean hasRegExpGlobalReferences = true; /** The function information map */ private FunctionInformationMap functionInformationMap; /** Debugging information */ private final StringBuilder debugLog = new StringBuilder(); /** Detects Google-specific coding conventions. */ CodingConvention defaultCodingConvention = new ClosureCodingConvention(); private JSTypeRegistry typeRegistry; private volatile Config parserConfig = null; private volatile Config externsParserConfig = null; private ReverseAbstractInterpreter abstractInterpreter; private TypeValidator typeValidator; // The compiler can ask phaseOptimizer for things like which pass is currently // running, or which functions have been changed by optimizations private PhaseOptimizer phaseOptimizer = null; public PerformanceTracker tracker; // Used by optimize-returns, optimize-parameters and remove-unused-variables private DefinitionUseSiteFinder defFinder = null; // Types that have been forward declared private Set<String> forwardDeclaredTypes = new HashSet<>(); // For use by the new type inference private GlobalTypeInfo symbolTable; private MostRecentTypechecker mostRecentTypechecker = MostRecentTypechecker.NONE; // This error reporter gets the messages from the current Rhino parser or TypeRegistry. private final ErrorReporter oldErrorReporter = RhinoErrorReporter.forOldRhino(this); /** Error strings used for reporting JSErrors */ public static final DiagnosticType OPTIMIZE_LOOP_ERROR = DiagnosticType.error( "JSC_OPTIMIZE_LOOP_ERROR", "Exceeded max number of optimization iterations: {0}"); public static final DiagnosticType MOTION_ITERATIONS_ERROR = DiagnosticType.error("JSC_OPTIMIZE_LOOP_ERROR", "Exceeded max number of code motion iterations: {0}"); private final CompilerExecutor compilerExecutor = new CompilerExecutor(); /** * Logger for the whole com.google.javascript.jscomp domain - * setting configuration for this logger affects all loggers * in other classes within the compiler. 
*/ public static final Logger logger = Logger.getLogger("com.google.javascript.jscomp"); private final PrintStream outStream; private GlobalVarReferenceMap globalRefMap = null; private volatile double progress = 0.0; private String lastPassName; private Set<String> externProperties = null; private static final Joiner pathJoiner = Joiner.on(File.separator); // TODO(johnlenz): remove "currentScope". // Used as a shortcut for change tracking. This is the current scope being // visited by the "current" NodeTraversal. This can't be thread safe so // we should move it into the NodeTraversal and require explicit changed // nodes elsewhere so we aren't blocked from doing this elsewhere. private Node currentChangeScope = null; // Starts at 0, increases as "interesting" things happen. // Nothing happens at time START_TIME, the first pass starts at time 1. // The correctness of scope-change tracking relies on Node/getIntProp // returning 0 if the custom attribute on a node hasn't been set. private int changeStamp = 1; /** * Creates a Compiler that reports errors and warnings to its logger. */ public Compiler() { this((PrintStream) null); } /** * Creates a Compiler that reports errors and warnings to an output stream. */ public Compiler(PrintStream stream) { addChangeHandler(recentChange); this.outStream = stream; } /** * Creates a Compiler that uses a custom error manager. */ public Compiler(ErrorManager errorManager) { this(); setErrorManager(errorManager); } /** * Sets the error manager. * * @param errorManager the error manager, it cannot be {@code null} */ public void setErrorManager(ErrorManager errorManager) { checkNotNull(errorManager, "the error manager cannot be null"); this.errorManager = new ThreadSafeDelegatingErrorManager(errorManager); } /** * Creates a message formatter instance corresponding to the value of * {@link CompilerOptions}. 
*/ private MessageFormatter createMessageFormatter() { boolean colorize = options.shouldColorizeErrorOutput(); return options.errorFormat.toFormatter(this, colorize); } @VisibleForTesting void setOriginalSourcesLoader(ExternalSourceLoader originalSourcesLoader) { this.originalSourcesLoader = originalSourcesLoader; } /** * Initializes the compiler options. It's called as part of a normal compile() job. * Public for the callers that are not doing a normal compile() job. */ public void initOptions(CompilerOptions options) { this.options = options; this.languageMode = options.getLanguageIn(); if (errorManager == null) { if (this.outStream == null) { setErrorManager( new LoggerErrorManager(createMessageFormatter(), logger)); } else { PrintStreamErrorManager printer = new PrintStreamErrorManager(createMessageFormatter(), this.outStream); printer.setSummaryDetailLevel(options.summaryDetailLevel); setErrorManager(printer); } } reconcileOptionsWithGuards(); // TODO(johnlenz): generally, the compiler should not be changing the options object // provided by the user. This should be handled a different way. // Turn off type-based optimizations when type checking is off if (!options.checkTypes) { options.setDisambiguateProperties(false); options.setAmbiguateProperties(false); options.setInlineProperties(false); options.setUseTypesForLocalOptimization(false); options.setUseTypesForOptimization(false); } if (options.legacyCodeCompile) { options.setDisambiguateProperties(false); options.setAmbiguateProperties(false); options.useNonStrictWarningsGuard(); } if (options.assumeForwardDeclaredForMissingTypes) { this.forwardDeclaredTypes = new AbstractSet<String>() { @Override public boolean contains(Object o) { return true; // Report all types as forward declared types. 
} @Override public boolean add(String e) { return false; } @Override public Iterator<String> iterator() { return Collections.<String>emptySet().iterator(); } @Override public int size() { return 0; } }; } initWarningsGuard(options.getWarningsGuard()); } public void printConfig(PrintStream printStream) { printStream.println("==== CompilerOptions ===="); printStream.println(options.toString()); printStream.println("==== WarningsGuard ===="); printStream.println(warningsGuard.toString()); } void initWarningsGuard(WarningsGuard warningsGuard) { this.warningsGuard = new ComposeWarningsGuard( new SuppressDocWarningsGuard(getDiagnosticGroups().getRegisteredGroups()), warningsGuard); } /** * When the CompilerOptions and its WarningsGuard overlap, reconcile * any discrepencies. */ protected void reconcileOptionsWithGuards() { // DiagnosticGroups override the plain checkTypes option. if (options.enables(DiagnosticGroups.CHECK_TYPES)) { options.checkTypes = true; } else if (options.disables(DiagnosticGroups.CHECK_TYPES)) { options.checkTypes = false; } else if (!options.checkTypes) { // If DiagnosticGroups did not override the plain checkTypes // option, and checkTypes is enabled, then turn off the // parser type warnings. options.setWarningLevel( DiagnosticGroup.forType( RhinoErrorReporter.TYPE_PARSE_ERROR), CheckLevel.OFF); } DiagnosticGroupState ntiState = options.getWarningsGuard().enablesExplicitly(DiagnosticGroups.NEW_CHECK_TYPES); if (ntiState == DiagnosticGroupState.ON) { options.setNewTypeInference(true); } else if (ntiState == DiagnosticGroupState.OFF) { options.setNewTypeInference(false); } // With NTI, we still need OTI to run because the later passes that use // types only understand OTI types at the moment. // But we do not want to see the warnings from OTI. if (options.getNewTypeInference()) { options.checkTypes = true; // Suppress warnings from the const checks of CheckAccessControls so as to avoid // duplication. 
options.setWarningLevel(DiagnosticGroups.ACCESS_CONTROLS_CONST, CheckLevel.OFF); if (!options.reportOTIErrorsUnderNTI) { options.setWarningLevel( DiagnosticGroups.OLD_CHECK_TYPES, CheckLevel.OFF); options.setWarningLevel( DiagnosticGroups.OLD_REPORT_UNKNOWN_TYPES, CheckLevel.OFF); options.setWarningLevel( FunctionTypeBuilder.ALL_DIAGNOSTICS, CheckLevel.OFF); } options.setWarningLevel( DiagnosticGroup.forType(RhinoErrorReporter.TYPE_PARSE_ERROR), CheckLevel.WARNING); } if (options.checkGlobalThisLevel.isOn() && !options.disables(DiagnosticGroups.GLOBAL_THIS)) { options.setWarningLevel( DiagnosticGroups.GLOBAL_THIS, options.checkGlobalThisLevel); } if (expectStrictModeInput()) { options.setWarningLevel( DiagnosticGroups.ES5_STRICT, CheckLevel.ERROR); } // All passes must run the variable check. This synthesizes // variables later so that the compiler doesn't crash. It also // checks the externs file for validity. If you don't want to warn // about missing variable declarations, we shut that specific // error off. if (!options.checkSymbols && !options.enables(DiagnosticGroups.CHECK_VARIABLES)) { options.setWarningLevel( DiagnosticGroups.CHECK_VARIABLES, CheckLevel.OFF); } } private boolean expectStrictModeInput() { switch (options.getLanguageIn()) { case ECMASCRIPT3: case ECMASCRIPT5: case ECMASCRIPT6: return false; case ECMASCRIPT5_STRICT: case ECMASCRIPT6_STRICT: case ECMASCRIPT6_TYPED: return true; default: return options.isStrictModeInput(); } } /** * Initializes the instance state needed for a compile job. 
*/ public <T1 extends SourceFile, T2 extends SourceFile> void init( List<T1> externs, List<T2> inputs, CompilerOptions options) { JSModule module = new JSModule(SINGLETON_MODULE_NAME); for (SourceFile input : inputs) { module.add(input); } List<JSModule> modules = new ArrayList<>(1); modules.add(module); initModules(externs, modules, options); addFilesToSourceMap(inputs); if (options.printConfig) { printConfig(System.err); } } /** * Initializes the instance state needed for a compile job if the sources * are in modules. */ public <T extends SourceFile> void initModules( List<T> externs, List<JSModule> modules, CompilerOptions options) { initOptions(options); checkFirstModule(modules); fillEmptyModules(modules); this.externs = makeCompilerInput(externs, true); // Generate the module graph, and report any errors in the module // specification as errors. this.modules = modules; try { this.moduleGraph = new JSModuleGraph(modules); } catch (JSModuleGraph.ModuleDependenceException e) { // problems with the module format. Report as an error. The // message gives all details. report(JSError.make(MODULE_DEPENDENCY_ERROR, e.getModule().getName(), e.getDependentModule().getName())); return; } this.inputs = getAllInputsFromModules(modules); this.commentsPerFile = new ConcurrentHashMap<>(inputs.size()); initBasedOnOptions(); initInputsByIdMap(); initAST(); } /** * Exists only for some tests that want to reuse JSModules. * @deprecated Fix those tests. */ @Deprecated public void breakThisCompilerSoItsModulesCanBeReused() { moduleGraph.breakThisGraphSoItsModulesCanBeReused(); moduleGraph = null; } /** * Do any initialization that is dependent on the compiler options. */ public void initBasedOnOptions() { inputSourceMaps.putAll(options.inputSourceMaps); // Create the source map if necessary. 
if (options.sourceMapOutputPath != null) { sourceMap = options.sourceMapFormat.getInstance(); sourceMap.setPrefixMappings(options.sourceMapLocationMappings); if (options.applyInputSourceMaps) { sourceMap.setSourceFileMapping(this); } } } private <T extends SourceFile> List<CompilerInput> makeCompilerInput( List<T> files, boolean isExtern) { List<CompilerInput> inputs = new ArrayList<>(files.size()); for (T file : files) { inputs.add(new CompilerInput(file, isExtern)); } return inputs; } private static final DiagnosticType EMPTY_MODULE_LIST_ERROR = DiagnosticType.error("JSC_EMPTY_MODULE_LIST_ERROR", "At least one module must be provided"); private static final DiagnosticType EMPTY_ROOT_MODULE_ERROR = DiagnosticType.error("JSC_EMPTY_ROOT_MODULE_ERROR", "Root module ''{0}'' must contain at least one source code input"); /** * Verifies that at least one module has been provided and that the first one * has at least one source code input. */ private void checkFirstModule(List<JSModule> modules) { if (modules.isEmpty()) { report(JSError.make(EMPTY_MODULE_LIST_ERROR)); } else if (modules.get(0).getInputs().isEmpty() && modules.size() > 1) { // The root module may only be empty if there is exactly 1 module. report(JSError.make(EMPTY_ROOT_MODULE_ERROR, modules.get(0).getName())); } } /** * Empty modules get an empty "fill" file, so that we can move code into * an empty module. */ static String createFillFileName(String moduleName) { return moduleName + "$fillFile"; } /** * Creates an OS specific path string from parts */ public static String joinPathParts(String... pathParts) { return pathJoiner.join(pathParts); } /** * Fill any empty modules with a place holder file. It makes any cross module * motion easier. 
*/ private static void fillEmptyModules(List<JSModule> modules) { for (JSModule module : modules) { if (module.getInputs().isEmpty()) { module.add(SourceFile.fromCode( createFillFileName(module.getName()), "")); } } } /** * Rebuilds the internal list of inputs by iterating over all modules. * This is necessary if inputs have been added to or removed from a module * after the {@link #init(List, List, CompilerOptions)} call. */ public void rebuildInputsFromModules() { inputs = getAllInputsFromModules(modules); initInputsByIdMap(); } /** * Builds a single list of all module inputs. Verifies that it contains no * duplicates. */ private static List<CompilerInput> getAllInputsFromModules( List<JSModule> modules) { List<CompilerInput> inputs = new ArrayList<>(); Map<String, JSModule> inputMap = new HashMap<>(); for (JSModule module : modules) { for (CompilerInput input : module.getInputs()) { String inputName = input.getName(); // NOTE(nicksantos): If an input is in more than one module, // it will show up twice in the inputs list, and then we // will get an error down the line. inputs.add(input); inputMap.put(inputName, module); } } return inputs; } static final DiagnosticType DUPLICATE_INPUT = DiagnosticType.error("JSC_DUPLICATE_INPUT", "Duplicate input: {0}"); static final DiagnosticType DUPLICATE_EXTERN_INPUT = DiagnosticType.error("JSC_DUPLICATE_EXTERN_INPUT", "Duplicate extern input: {0}"); /** * Returns the relative path, resolved relative to the base path, where the * base path is interpreted as a filename rather than a directory. E.g.: * getRelativeTo("../foo/bar.js", "baz/bam/qux.js") --> "baz/foo/bar.js" */ private static String getRelativeTo(String relative, String base) { return FileSystems.getDefault().getPath(base) .resolveSibling(relative) .normalize() .toString() .replace(File.separator, "/"); } /** * Creates a map to make looking up an input by name fast. Also checks for * duplicate inputs. 
*/ void initInputsByIdMap() { inputsById.clear(); for (CompilerInput input : externs) { InputId id = input.getInputId(); CompilerInput previous = putCompilerInput(id, input); if (previous != null) { report(JSError.make(DUPLICATE_EXTERN_INPUT, input.getName())); } } for (CompilerInput input : inputs) { InputId id = input.getInputId(); CompilerInput previous = putCompilerInput(id, input); if (previous != null) { report(JSError.make(DUPLICATE_INPUT, input.getName())); } } } /** * Sets up the skeleton of the AST (the externs and root). */ private void initAST() { jsRoot = IR.root(); externsRoot = IR.root(); externAndJsRoot = IR.root(externsRoot, jsRoot); } /** Compiles a single source file and a single externs file. */ public Result compile(SourceFile extern, SourceFile input, CompilerOptions options) { return compile(ImmutableList.of(extern), ImmutableList.of(input), options); } /** * Compiles a list of inputs. * * <p>This is a convenience method to wrap up all the work of compilation, including * generating the error and warning report. * * <p>NOTE: All methods called here must be public, because client code must be able to replicate * and customize this. */ public <T1 extends SourceFile, T2 extends SourceFile> Result compile( List<T1> externs, List<T2> inputs, CompilerOptions options) { // The compile method should only be called once. checkState(jsRoot == null); try { init(externs, inputs, options); if (!hasErrors()) { parseForCompilation(); } if (!hasErrors()) { if (options.getInstrumentForCoverageOnly()) { // TODO(bradfordcsmith): The option to instrument for coverage only should belong to the // runner, not the compiler. instrumentForCoverage(); } else { stage1Passes(); if (!hasErrors()) { stage2Passes(); } } completeCompilation(); } } finally { generateReport(); } return getResult(); } /** * Generates a report of all warnings and errors found during compilation to stderr. 
* * <p>Client code must call this method explicitly if it doesn't use one of the convenience * methods that do so automatically. * <p>Always call this method, even if the compiler throws an exception. The report will include * information about the exception. */ public void generateReport() { Tracer t = newTracer("generateReport"); errorManager.generateReport(); stopTracer(t, "generateReport"); } /** * Compiles a list of modules. * * <p>This is a convenience method to wrap up all the work of compilation, including * generating the error and warning report. * * <p>NOTE: All methods called here must be public, because client code must be able to replicate * and customize this. */ public <T extends SourceFile> Result compileModules( List<T> externs, List<JSModule> modules, CompilerOptions options) { // The compile method should only be called once. checkState(jsRoot == null); try { initModules(externs, modules, options); if (!hasErrors()) { parseForCompilation(); } if (!hasErrors()) { // TODO(bradfordcsmith): The option to instrument for coverage only should belong to the // runner, not the compiler. if (options.getInstrumentForCoverageOnly()) { instrumentForCoverage(); } else { stage1Passes(); if (!hasErrors()) { stage2Passes(); } } completeCompilation(); } } finally { generateReport(); } return getResult(); } /** * Perform compiler passes for stage 1 of compilation. * * <p>Stage 1 consists primarily of error and type checking passes. * * <p>{@code parseForCompilation()} must be called before this method is called. * * <p>The caller is responsible for also calling {@code generateReport()} to generate a report of * warnings and errors to stderr. See the invocation in {@link #compile} for a good example. */ public void stage1Passes() { checkState( inputs != null && !inputs.isEmpty(), "No inputs. 
Did you call init() or initModules()?"); checkState(!hasErrors()); checkState(!options.getInstrumentForCoverageOnly()); runInCompilerThread( new Callable<Void>() { @Override public Void call() throws Exception { performChecksAndTranspilation(); return null; } }); } /** * Perform compiler passes for stage 2 of compilation. * * <p>Stage 2 consists primarily of optimization passes. * * <p>{@code stage1Passes()} must be called before this method is called. * * <p>The caller is responsible for also calling {@code generateReport()} to generate a report of * warnings and errors to stderr. See the invocation in {@link #compile} for a good example. */ public void stage2Passes() { checkState( inputs != null && !inputs.isEmpty(), "No inputs. Did you call init() or initModules()?"); checkState(!hasErrors()); checkState(!options.getInstrumentForCoverageOnly()); runInCompilerThread( new Callable<Void>() { @Override public Void call() throws Exception { if (options.shouldOptimize()) { performOptimizations(); } return null; } }); } /** * Disable threads. This is for clients that run on AppEngine and * don't have threads. */ public void disableThreads() { compilerExecutor.disableThreads(); } /** * Sets the timeout when Compiler is run in a thread * @param timeout seconds to wait before timeout */ public void setTimeout(int timeout) { compilerExecutor.setTimeout(timeout); } /** * The primary purpose of this method is to run the provided code with a larger than standard * stack. */ <T> T runInCompilerThread(Callable<T> callable) { return compilerExecutor.runInCompilerThread( callable, options != null && options.getTracerMode().isOn()); } private void performChecksAndTranspilation() { if (options.skipNonTranspilationPasses) { // i.e. 
whitespace-only mode, which will not work with goog.module without: whitespaceOnlyPasses(); if (options.lowerFromEs6()) { transpileAndDontCheck(); } } else { check(); // check() also includes transpilation } } /** * Performs all the bookkeeping required at the end of a compilation. * * <p>This method must be called if the compilation makes it as far as doing checks. * <p> DON'T call it if the compiler threw an exception. * <p> DO call it even when {@code hasErrors()} returns true. */ public void completeCompilation() { runInCompilerThread(new Callable<Void>() { @Override public Void call() throws Exception { completeCompilationInternal(); return null; } }); } /** * Performs all the bookkeeping required at the end of a compilation. */ private void completeCompilationInternal() { if (options.recordFunctionInformation) { recordFunctionInformation(); } if (options.devMode == DevMode.START_AND_END) { runSanityCheck(); } setProgress(1.0, "recordFunctionInformation"); if (tracker != null) { tracker.outputTracerReport(); } } /** * Instrument code for coverage. * * <p>{@code parseForCompilation()} must be called before this method is called. * * <p>The caller is responsible for also calling {@code generateReport()} to generate a report of * warnings and errors to stderr. See the invocation in {@link #compile} for a good example. * * <p>This method is mutually exclusive with stage1Passes() and stage2Passes(). * Either call those two methods or this one, but not both. */ public void instrumentForCoverage() { checkState( inputs != null && !inputs.isEmpty(), "No inputs. 
Did you call init() or initModules()?"); checkState(!hasErrors()); runInCompilerThread( new Callable<Void>() { @Override public Void call() throws Exception { checkState(options.getInstrumentForCoverageOnly()); checkState(!hasErrors()); instrumentForCoverageInternal(options.instrumentBranchCoverage); return null; } }); } private void instrumentForCoverageInternal(boolean instrumentBranchCoverage) { Tracer tracer = newTracer("instrumentationPass"); InstrumentOption instrumentOption = InstrumentOption.LINE_ONLY; if (instrumentBranchCoverage) { instrumentOption = InstrumentOption.BRANCH_ONLY; } process(new CoverageInstrumentationPass(this, CoverageReach.ALL, instrumentOption)); stopTracer(tracer, "instrumentationPass"); } /** * Parses input files in preparation for compilation. * * <p>Either {@code init()} or {@code initModules()} must be called first to set up the input * files to be read. * <p>TODO(bradfordcsmith): Rename this to parse() */ public void parseForCompilation() { runInCompilerThread( new Callable<Void>() { @Override public Void call() throws Exception { parseForCompilationInternal(); return null; } }); } /** * Parses input files in preparation for compilation. * * <p>Either {@code init()} or {@code initModules()} must be called first to set up the input * files to be read. * * <p>TODO(bradfordcsmith): Rename this to parse() */ private void parseForCompilationInternal() { setProgress(0.0, null); CompilerOptionsPreprocessor.preprocess(options); readInputs(); // Guesstimate. setProgress(0.02, "read"); parseInputs(); // Guesstimate. setProgress(0.15, "parse"); } /** * Parses input files without doing progress tracking that is part of a full compile. * * <p>Either {@code init()} or {@code initModules()} must be called first to set up the input * files to be read. * <p>TODO(bradfordcsmith): Rename this to parseIndependentOfCompilation() or similar. 
*/
  // Parses inputs without the progress tracking done by a full compile.
  public void parse() {
    parseInputs();
  }

  // Lazily creates the PassConfig on first use; see setPassConfig for the
  // reason a client-supplied config must be installed before this runs.
  PassConfig getPassConfig() {
    if (passes == null) {
      passes = createPassConfigInternal();
    }
    return passes;
  }

  /**
   * Create the passes object. Clients should use setPassConfig instead of
   * overriding this.
   */
  PassConfig createPassConfigInternal() {
    return new DefaultPassConfig(options);
  }

  /**
   * @param passes The PassConfig to use with this Compiler.
   *
   * @throws NullPointerException if passes is null
   * @throws IllegalStateException if this.passes has already been assigned
   */
  public void setPassConfig(PassConfig passes) {
    // Important to check for null because if setPassConfig(null) is
    // called before this.passes is set, getPassConfig() will create a
    // new PassConfig object and use that, which is probably not what
    // the client wanted since they probably meant to use their
    // own PassConfig object.
    checkNotNull(passes);
    checkState(this.passes == null, "setPassConfig was already called");
    this.passes = passes;
  }

  // Runs only the whitespace-only passes (plus any BEFORE_CHECKS custom
  // passes), tracing the whole group under a single tracer.
  public void whitespaceOnlyPasses() {
    runCustomPasses(CustomPassExecutionTime.BEFORE_CHECKS);
    Tracer t = newTracer("runWhitespaceOnlyPasses");
    try {
      for (PassFactory pf : getPassConfig().getWhitespaceOnlyPasses()) {
        pf.create(this).process(externsRoot, jsRoot);
      }
    } finally {
      stopTracer(t, "runWhitespaceOnlyPasses");
    }
  }

  // Runs only the transpilation passes, without any checks.
  public void transpileAndDontCheck() {
    Tracer t = newTracer("runTranspileOnlyPasses");
    try {
      for (PassFactory pf : getPassConfig().getTranspileOnlyPasses()) {
        pf.create(this).process(externsRoot, jsRoot);
      }
    } finally {
      stopTracer(t, "runTranspileOnlyPasses");
    }
  }

  // Builds a PhaseOptimizer configured from the dev-mode and determinism
  // options; sanity checking after every pass only in EVERY_PASS mode.
  private PhaseOptimizer createPhaseOptimizer() {
    PhaseOptimizer phaseOptimizer = new PhaseOptimizer(this, tracker);
    if (options.devMode == DevMode.EVERY_PASS) {
      phaseOptimizer.setSanityCheck(sanityCheck);
    }
    if (options.getCheckDeterminism()) {
      phaseOptimizer.setPrintAstHashcodes(true);
    }
    return phaseOptimizer;
  }

  // Runs the check passes (which also include transpilation), then stripping
  // and the BEFORE_OPTIMIZATIONS custom passes. Bails out early on errors.
  void check() {
    runCustomPasses(CustomPassExecutionTime.BEFORE_CHECKS);

    // We are currently only interested in check-passes for progress reporting
    // as it is used for IDEs, that's why the maximum progress is set to 1.0.
    phaseOptimizer = createPhaseOptimizer().withProgress(
        new PhaseOptimizer.ProgressRange(getProgress(), 1.0));
    phaseOptimizer.consume(getPassConfig().getChecks());
    phaseOptimizer.process(externsRoot, jsRoot);
    if (hasErrors()) {
      return;
    }

    if (options.getTweakProcessing().shouldStrip()
        || !options.stripTypes.isEmpty()
        || !options.stripNameSuffixes.isEmpty()
        || !options.stripTypePrefixes.isEmpty()
        || !options.stripNamePrefixes.isEmpty()) {
      stripCode(options.stripTypes, options.stripNameSuffixes,
          options.stripTypePrefixes, options.stripNamePrefixes);
    }

    runCustomPasses(CustomPassExecutionTime.BEFORE_OPTIMIZATIONS);
    // Release the optimizer; a fresh one is built for later phases.
    phaseOptimizer = null;
  }

  @Override
  void setExternExports(String externExports) {
    this.externExports = externExports;
  }

  @Override
  void process(CompilerPass p) {
    p.process(externsRoot, jsRoot);
  }

  // AST sanity-check pass, run between passes in dev modes.
  private final PassFactory sanityCheck =
      new PassFactory("sanityCheck", false) {
        @Override
        protected CompilerPass create(AbstractCompiler compiler) {
          return new SanityCheck(compiler);
        }
      };

  private void maybeSanityCheck() {
    if (options.devMode == DevMode.EVERY_PASS) {
      runSanityCheck();
    }
  }

  private void runSanityCheck() {
    sanityCheck.create(this).process(externsRoot, jsRoot);
  }

  /**
   * Strips code for smaller compiled code. This is useful for removing debug
   * statements to prevent leaking them publicly.
   */
  void stripCode(Set<String> stripTypes, Set<String> stripNameSuffixes,
      Set<String> stripTypePrefixes, Set<String> stripNamePrefixes) {
    logger.fine("Strip code");
    startPass("stripCode");
    StripCode r = new StripCode(this, stripTypes, stripNameSuffixes,
        stripTypePrefixes, stripNamePrefixes);
    if (options.getTweakProcessing().shouldStrip()) {
      r.enableTweakStripping();
    }
    process(r);
    endPass("stripCode");
  }

  /**
   * Runs custom passes that are designated to run at a particular time.
*/
  private void runCustomPasses(CustomPassExecutionTime executionTime) {
    if (options.customPasses != null) {
      Tracer t = newTracer("runCustomPasses");
      try {
        for (CompilerPass p : options.customPasses.get(executionTime)) {
          process(p);
        }
      } finally {
        stopTracer(t, "runCustomPasses");
      }
    }
  }

  // Tracer state for the pass currently in flight; startPass/endPass must be
  // strictly paired (checkState enforces this).
  private Tracer currentTracer = null;
  private String currentPassName = null;

  /**
   * Marks the beginning of a pass.
   */
  void startPass(String passName) {
    checkState(currentTracer == null);
    currentPassName = passName;
    currentTracer = newTracer(passName);
    beforePass(passName);
  }

  /**
   * Marks the end of a pass.
   */
  void endPass(String passName) {
    checkState(currentTracer != null,
        "Tracer should not be null at the end of a pass.");
    stopTracer(currentTracer, currentPassName);
    afterPass(passName);
    currentPassName = null;
    currentTracer = null;

    maybeSanityCheck();
  }

  @Override
  final void beforePass(String passName) {
    // does nothing for now
  }

  // Debug hook: when printSourceAfterEachPass is set, dump the generated
  // source after any pass that changed it.
  @Override
  final void afterPass(String passName) {
    if (options.printSourceAfterEachPass) {
      String currentJsSource = getCurrentJsSource();
      if (!currentJsSource.equals(this.lastJsSource)) {
        System.out.println();
        System.out.println("// " + passName + " yields:");
        System.out.println("// ************************************");
        System.out.println(currentJsSource);
        lastJsSource = currentJsSource;
      }
    }
  }

  // Returns the current generated source, either for the whole program or
  // only for the files listed in filesToPrintAfterEachPass.
  final String getCurrentJsSource() {
    List<String> filenames = options.filesToPrintAfterEachPass;
    if (filenames.isEmpty()) {
      return toSource();
    } else {
      StringBuilder builder = new StringBuilder();
      for (String filename : filenames) {
        Node script = getScriptNode(filename);
        String source = script != null
            ? "// " + script.getSourceFileName() + "\n" + toSource(script)
            : "File '" + filename + "' not found";
        builder.append(source);
      }
      return builder.toString();
    }
  }

  // Finds the SCRIPT node whose source file name ends with the given
  // filename, or null if none matches.
  final Node getScriptNode(String filename) {
    for (Node file : jsRoot.children()) {
      if (file.getSourceFileName() != null
          && file.getSourceFileName().endsWith(filename)) {
        return file;
      }
    }
    return null;
  }

  /**
   * Returns a new tracer for the given pass name.
   */
  Tracer newTracer(String passName) {
    String comment = passName
        + (recentChange.hasCodeChanged() ? " on recently changed AST" : "");
    if (options.getTracerMode().isOn() && tracker != null) {
      tracker.recordPassStart(passName, true);
    }
    return new Tracer("Compiler", comment);
  }

  void stopTracer(Tracer t, String passName) {
    long result = t.stop();
    if (options.getTracerMode().isOn() && tracker != null) {
      tracker.recordPassStop(passName, result);
    }
  }

  /**
   * Returns the result of the compilation.
   */
  public Result getResult() {
    PassConfig.State state = getPassConfig().getIntermediateState();
    // Collect the source files whose scripts were marked TRANSPILED.
    Set<SourceFile> transpiledFiles = new HashSet<>();
    if (jsRoot != null) {
      for (Node scriptNode : jsRoot.children()) {
        if (scriptNode.getBooleanProp(Node.TRANSPILED)) {
          transpiledFiles.add(getSourceFileByName(scriptNode.getSourceFileName()));
        }
      }
    }
    return new Result(getErrors(), getWarnings(), debugLog.toString(),
        state.variableMap, state.propertyMap,
        state.anonymousFunctionNameMap, state.stringMap, functionInformationMap,
        sourceMap, externExports, state.cssNames, state.idGeneratorMap, transpiledFiles);
  }

  /**
   * Returns the array of errors (never null).
   */
  public JSError[] getErrors() {
    if (errorManager == null) {
      // No error manager yet (e.g. before init()); report none.
      return new JSError[] {};
    }
    return errorManager.getErrors();
  }

  /**
   * Returns the array of warnings (never null).
*/
  public JSError[] getWarnings() {
    if (errorManager == null) {
      // No error manager yet (e.g. before init()); report none.
      return new JSError[] {};
    }
    return errorManager.getWarnings();
  }

  @Override
  public Node getRoot() {
    return externAndJsRoot;
  }

  @Override
  CompilerOptions.LanguageMode getLanguageMode() {
    return languageMode;
  }

  @Override
  void setLanguageMode(CompilerOptions.LanguageMode mode) {
    languageMode = mode;
  }

  /**
   * Creates a new id for making unique names.
   */
  private int nextUniqueNameId() {
    return uniqueNameId++;
  }

  /**
   * Resets the unique name id counter
   */
  @VisibleForTesting
  void resetUniqueNameId() {
    uniqueNameId = 0;
  }

  // Supplier view over nextUniqueNameId(); each get() returns a fresh id as
  // a decimal string.
  @Override
  Supplier<String> getUniqueNameIdSupplier() {
    final Compiler self = this;
    return new Supplier<String>() {
      @Override
      public String get() {
        return String.valueOf(self.nextUniqueNameId());
      }
    };
  }

  @Override
  boolean areNodesEqualForInlining(Node n1, Node n2) {
    if (options.shouldAmbiguateProperties() ||
        options.shouldDisambiguateProperties()) {
      // The type based optimizations require that type information is preserved
      // during other optimizations.
      return n1.isEquivalentToTyped(n2);
    } else {
      return n1.isEquivalentTo(n2);
    }
  }

  //------------------------------------------------------------------------
  // Inputs
  //------------------------------------------------------------------------

  // TODO(nicksantos): Decide which parts of these belong in an AbstractCompiler
  // interface, and which ones should always be injected.

  @Override
  public CompilerInput getInput(InputId id) {
    // TODO(bradfordcsmith): Allowing null id is less ideal. Add checkNotNull(id) here and fix
    // call sites that break.
    if (id == null) {
      return null;
    }
    return inputsById.get(id);
  }

  /**
   * Removes an input file from AST.
   * @param id The id of the input to be removed.
   */
  protected void removeExternInput(InputId id) {
    CompilerInput input = getInput(id);
    if (input == null) {
      // Nothing registered under this id; nothing to remove.
      return;
    }
    checkState(input.isExtern(), "Not an extern input: %s", input.getName());
    inputsById.remove(id);
    externs.remove(input);
    Node root = input.getAstRoot(this);
    if (root != null) {
      // Detach the input's parse tree from the externs root as well.
      root.detach();
    }
  }

  // Where to put a new synthetic externs file.
  private static enum SyntheticExternsPosition {
    START,
    END
  }

  // Creates and registers a synthetic externs input, inserting its AST at the
  // front or back of the externs root per pos.
  CompilerInput newExternInput(String name, SyntheticExternsPosition pos) {
    SourceAst ast = new SyntheticAst(name);
    if (inputsById.containsKey(ast.getInputId())) {
      throw new IllegalArgumentException("Conflicting externs name: " + name);
    }
    CompilerInput input = new CompilerInput(ast, true);
    putCompilerInput(input.getInputId(), input);
    if (pos == SyntheticExternsPosition.START) {
      externsRoot.addChildToFront(ast.getAstRoot(this));
      externs.add(0, input);
    } else {
      externsRoot.addChildToBack(ast.getAstRoot(this));
      externs.add(input);
    }
    return input;
  }

  // Registers the input in inputsById, returning any previous mapping for the
  // same id (callers use the return value to detect duplicates).
  CompilerInput putCompilerInput(InputId id, CompilerInput input) {
    input.setCompiler(this);
    return inputsById.put(id, input);
  }

  /**
   * Replace a source input dynamically. Intended for incremental
   * re-compilation.
   *
   * If the new source input doesn't parse, then keep the old input
   * in the AST and return false.
   *
   * @return Whether the new AST was attached successfully.
*/
  boolean replaceIncrementalSourceAst(JsAst ast) {
    CompilerInput oldInput = getInput(ast.getInputId());
    checkNotNull(oldInput, "No input to replace: %s", ast.getInputId().getIdName());
    Node newRoot = ast.getAstRoot(this);
    if (newRoot == null) {
      // Parse failed: keep the old input in place, as documented above.
      return false;
    }

    Node oldRoot = oldInput.getAstRoot(this);
    if (oldRoot != null) {
      // Swap the new parse tree into the old tree's position.
      oldRoot.replaceWith(newRoot);
    } else {
      // Old input had no tree attached; append to the JS root.
      getRoot().getLastChild().addChildToBack(newRoot);
    }

    CompilerInput newInput = new CompilerInput(ast);
    putCompilerInput(ast.getInputId(), newInput);

    // Keep the module bookkeeping consistent: the new input takes the old
    // input's position within its module.
    JSModule module = oldInput.getModule();
    if (module != null) {
      module.addAfter(newInput, oldInput);
      module.remove(oldInput);
    }

    // Verify the input id is set properly.
    checkState(newInput.getInputId().equals(oldInput.getInputId()));
    InputId inputIdOnAst = newInput.getAstRoot(this).getInputId();
    checkState(newInput.getInputId().equals(inputIdOnAst));

    inputs.remove(oldInput);
    return true;
  }

  /**
   * Add a new source input dynamically. Intended for incremental compilation.
   * <p>
   * If the new source input doesn't parse, it will not be added, and a false
   * will be returned.
   *
   * @param ast the JS Source to add.
   * @return true if the source was added successfully, false otherwise.
   * @throws IllegalStateException if an input for this ast already exists.
   */
  boolean addNewSourceAst(JsAst ast) {
    CompilerInput oldInput = getInput(ast.getInputId());
    if (oldInput != null) {
      throw new IllegalStateException(
          "Input already exists: " + ast.getInputId().getIdName());
    }
    Node newRoot = ast.getAstRoot(this);
    if (newRoot == null) {
      // Parse failed: do not register anything.
      return false;
    }

    getRoot().getLastChild().addChildToBack(newRoot);

    CompilerInput newInput = new CompilerInput(ast);

    // TODO(tylerg): handle this for multiple modules at some point.
    if (moduleGraph == null && !modules.isEmpty()) {
      // singleton module
      modules.get(0).add(newInput);
    }

    putCompilerInput(ast.getInputId(), newInput);

    return true;
  }

  /**
   * The graph of the JS source modules.
* * <p>Must return null if there are less than 2 modules, * because we use this as a signal for which passes to run. * TODO(bradfordcsmith): Just check for a single module instead of null. */ @Override JSModuleGraph getModuleGraph() { if (moduleGraph != null && modules.size() > 1) { return moduleGraph; } else { return null; } } /** * Gets a module graph. This will always return a module graph, even * in the degenerate case when there's only one module. */ JSModuleGraph getDegenerateModuleGraph() { return moduleGraph; } @Override public TypeIRegistry getTypeIRegistry() { switch (mostRecentTypechecker) { case NONE: // Even in compiles where typechecking is not enabled, some passes ask for the // type registry, eg, GatherExternProperties does. Also, in CheckAccessControls, // the constructor asks for a type registry, and this may happen before type checking // runs. So, in the NONE case, if NTI is enabled, return a new registry, since NTI is // the relevant type checker. If NTI is not enabled, return an old registry. return options.getNewTypeInference() ? 
getSymbolTable() : getTypeRegistry(); case OTI: return getTypeRegistry(); case NTI: return getSymbolTable(); default: throw new RuntimeException("Unhandled typechecker " + mostRecentTypechecker); } } @Override public JSTypeRegistry getTypeRegistry() { if (typeRegistry == null) { typeRegistry = new JSTypeRegistry(oldErrorReporter, forwardDeclaredTypes); } return typeRegistry; } @Override void forwardDeclareType(String typeName) { if (options.allowUnfulfilledForwardDeclarations()) { forwardDeclaredTypes.add(typeName); } } @Override void setMostRecentTypechecker(MostRecentTypechecker lastRun) { this.mostRecentTypechecker = lastRun; } @Override // Only used by jsdev public MemoizedScopeCreator getTypedScopeCreator() { return getPassConfig().getTypedScopeCreator(); } @SuppressWarnings("unchecked") DefaultPassConfig ensureDefaultPassConfig() { PassConfig passes = getPassConfig().getBasePassConfig(); checkState( passes instanceof DefaultPassConfig, "PassConfigs must eventually delegate to the DefaultPassConfig"); return (DefaultPassConfig) passes; } public SymbolTable buildKnownSymbolTable() { SymbolTable symbolTable = new SymbolTable(this, getTypeRegistry()); MemoizedScopeCreator typedScopeCreator = getTypedScopeCreator(); if (typedScopeCreator != null) { symbolTable.addScopes(typedScopeCreator.getAllMemoizedScopes()); symbolTable.addSymbolsFrom(typedScopeCreator); } else { symbolTable.findScopes(externsRoot, jsRoot); } GlobalNamespace globalNamespace = ensureDefaultPassConfig().getGlobalNamespace(); if (globalNamespace != null) { symbolTable.addSymbolsFrom(globalNamespace); } ReferenceCollectingCallback refCollector = new ReferenceCollectingCallback( this, ReferenceCollectingCallback.DO_NOTHING_BEHAVIOR, SyntacticScopeCreator.makeUntyped(this)); refCollector.process(getRoot()); symbolTable.addSymbolsFrom(refCollector); PreprocessorSymbolTable preprocessorSymbolTable = ensureDefaultPassConfig().getPreprocessorSymbolTable(); if (preprocessorSymbolTable != null) { 
symbolTable.addSymbolsFrom(preprocessorSymbolTable); } symbolTable.fillNamespaceReferences(); symbolTable.fillPropertyScopes(); symbolTable.fillThisReferences(externsRoot, jsRoot); symbolTable.fillPropertySymbols(externsRoot, jsRoot); symbolTable.fillJSDocInfo(externsRoot, jsRoot); symbolTable.fillSymbolVisibility(externsRoot, jsRoot); return symbolTable; } @Override public TypedScope getTopScope() { return getPassConfig().getTopScope(); } @Override public ReverseAbstractInterpreter getReverseAbstractInterpreter() { if (abstractInterpreter == null) { ChainableReverseAbstractInterpreter interpreter = new SemanticReverseAbstractInterpreter(getTypeRegistry()); if (options.closurePass) { interpreter = new ClosureReverseAbstractInterpreter(getTypeRegistry()) .append(interpreter).getFirst(); } abstractInterpreter = interpreter; } return abstractInterpreter; } @Override // Only used by passes in the old type checker. TypeValidator getTypeValidator() { if (typeValidator == null) { typeValidator = new TypeValidator(this); } return typeValidator; } @Override Iterable<TypeMismatch> getTypeMismatches() { switch (this.mostRecentTypechecker) { case OTI: return getTypeValidator().getMismatches(); case NTI: return getSymbolTable().getMismatches(); default: throw new RuntimeException("Can't ask for type mismatches before type checking."); } } @Override Iterable<TypeMismatch> getImplicitInterfaceUses() { switch (this.mostRecentTypechecker) { case OTI: return getTypeValidator().getImplicitInterfaceUses(); case NTI: return getSymbolTable().getImplicitInterfaceUses(); default: throw new RuntimeException("Can't ask for type mismatches before type checking."); } } @Override GlobalTypeInfo getSymbolTable() { if (this.symbolTable == null) { this.symbolTable = new GlobalTypeInfo(this, forwardDeclaredTypes); } return this.symbolTable; } @Override DefinitionUseSiteFinder getDefinitionFinder() { return this.defFinder; } @Override void setDefinitionFinder(DefinitionUseSiteFinder defFinder) { 
this.defFinder = defFinder; } //------------------------------------------------------------------------ // Reading //------------------------------------------------------------------------ /** * Performs all externs and main inputs IO. * * <p>Allows for easy measurement of IO cost separately from parse cost. */ void readInputs() { checkState(!hasErrors()); checkNotNull(externs); checkNotNull(inputs); if (options.getTracerMode().isOn()) { tracker = new PerformanceTracker(externsRoot, jsRoot, options.getTracerMode(), this.outStream); addChangeHandler(tracker.getCodeChangeHandler()); } Tracer tracer = newTracer(READING_PASS_NAME); beforePass(READING_PASS_NAME); try { for (CompilerInput input : Iterables.concat(externs, inputs)) { try { input.getCode(); } catch (IOException e) { report(JSError.make(AbstractCompiler.READ_ERROR, input.getName())); } } } finally { afterPass(READING_PASS_NAME); stopTracer(tracer, READING_PASS_NAME); } } //------------------------------------------------------------------------ // Parsing //------------------------------------------------------------------------ /** * Parses the externs and main inputs. * * @return A synthetic root node whose two children are the externs root * and the main root */ Node parseInputs() { boolean devMode = options.devMode != DevMode.OFF; // If old roots exist (we are parsing a second time), detach each of the // individual file parse trees. externsRoot.detachChildren(); jsRoot.detachChildren(); Tracer tracer = newTracer(PARSING_PASS_NAME); beforePass(PARSING_PASS_NAME); try { // Parse externs sources. 
if (options.numParallelThreads > 1) { new PrebuildAst(this, options.numParallelThreads).prebuild(externs); } for (CompilerInput input : externs) { Node n = input.getAstRoot(this); if (hasErrors()) { return null; } externsRoot.addChildToBack(n); } if (options.lowerFromEs6() || options.transformAMDToCJSModules || options.processCommonJSModules) { this.moduleLoader = new ModuleLoader( this, options.moduleRoots, inputs, ModuleLoader.PathResolver.RELATIVE, options.moduleResolutionMode, null); if (options.moduleResolutionMode == ModuleLoader.ResolutionMode.NODE) { // processJsonInputs requires a module loader to already be defined // so we redefine it afterwards with the package.json inputs this.moduleLoader = new ModuleLoader( this, options.moduleRoots, inputs, ModuleLoader.PathResolver.RELATIVE, options.moduleResolutionMode, processJsonInputs(inputs)); } if (options.lowerFromEs6()) { processEs6Modules(); } // Modules inferred in ProcessCommonJS pass. if (options.transformAMDToCJSModules || options.processCommonJSModules) { processAMDAndCommonJSModules(); } // Build a map of module identifiers for any input which provides no namespace. // These files could be imported modules which have no exports, but do have side effects. Map<String, CompilerInput> inputModuleIdentifiers = new HashMap<>(); for (CompilerInput input : inputs) { if (input.getKnownProvides().isEmpty()) { ModuleLoader.ModulePath modPath = moduleLoader.resolve(input.getSourceFile().getOriginalPath()); inputModuleIdentifiers.put(modPath.toModuleName(), input); } } // Find out if any input attempted to import a module that had no exports. 
// In this case we must force module rewriting to occur on the imported file Map<String, CompilerInput> inputsToRewrite = new HashMap<>(); for (CompilerInput input : inputs) { for (String require : input.getKnownRequires()) { if (inputModuleIdentifiers.containsKey(require) && !inputsToRewrite.containsKey(require)) { inputsToRewrite.put(require, inputModuleIdentifiers.get(require)); } } } if (!inputsToRewrite.isEmpty()) { processEs6Modules(new ArrayList<>(inputsToRewrite.values()), true); } } else { // Use an empty module loader if we're not actually dealing with modules. this.moduleLoader = ModuleLoader.EMPTY; } orderInputs(); // If in IDE mode, we ignore the error and keep going. if (hasErrors()) { return null; } // Build the AST. if (options.numParallelThreads > 1) { new PrebuildAst(this, options.numParallelThreads).prebuild(inputs); } for (CompilerInput input : inputs) { Node n = input.getAstRoot(this); if (n == null) { continue; } if (devMode) { runSanityCheck(); if (hasErrors()) { return null; } } // TODO(johnlenz): we shouldn't need to check both isExternExportsEnabled and // externExportsPath. if (options.sourceMapOutputPath != null || options.isExternExportsEnabled() || options.externExportsPath != null || !options.replaceStringsFunctionDescriptions.isEmpty()) { // Annotate the nodes in the tree with information from the // input file. This information is used to construct the SourceMap. 
SourceInformationAnnotator sia = new SourceInformationAnnotator( input.getName(), options.devMode != DevMode.OFF); NodeTraversal.traverseEs6(this, n, sia); } jsRoot.addChildToBack(n); } if (hasErrors()) { return null; } return externAndJsRoot; } finally { afterPass(PARSING_PASS_NAME); stopTracer(tracer, PARSING_PASS_NAME); } } void orderInputsWithLargeStack() { runInCompilerThread(new Callable<Void>() { @Override public Void call() throws Exception { Tracer tracer = newTracer("orderInputsWithLargeStack"); try { orderInputs(); } finally { stopTracer(tracer, "orderInputsWithLargeStack"); } return null; } }); } void orderInputs() { hoistUnorderedExterns(); // Check if the sources need to be re-ordered. boolean staleInputs = false; if (options.dependencyOptions.needsManagement()) { for (CompilerInput input : inputs) { // Forward-declare all the provided types, so that they // are not flagged even if they are dropped from the process. for (String provide : input.getProvides()) { forwardDeclareType(provide); } } try { inputs = getDegenerateModuleGraph().manageDependencies(options.dependencyOptions, inputs); staleInputs = true; } catch (MissingProvideException e) { report(JSError.make( MISSING_ENTRY_ERROR, e.getMessage())); } catch (JSModuleGraph.MissingModuleException e) { report(JSError.make( MISSING_MODULE_ERROR, e.getMessage())); } } if (options.dependencyOptions.needsManagement() && options.allowGoogProvideInExterns()) { hoistAllExterns(); } hoistNoCompileFiles(); if (staleInputs) { repartitionInputs(); } } /** * Hoists inputs with the @externs annotation and no provides or requires into the externs list. */ void hoistUnorderedExterns() { boolean staleInputs = false; for (CompilerInput input : inputs) { if (options.dependencyOptions.needsManagement()) { // If we're doing scanning dependency info anyway, use that // information to skip sources that obviously aren't externs. 
if (!input.getProvides().isEmpty() || !input.getRequires().isEmpty()) { continue; } } if (hoistIfExtern(input)) { staleInputs = true; } } if (staleInputs) { repartitionInputs(); } } /** * Hoists inputs with the @externs annotation into the externs list. */ void hoistAllExterns() { boolean staleInputs = false; for (CompilerInput input : inputs) { if (hoistIfExtern(input)) { staleInputs = true; } } if (staleInputs) { repartitionInputs(); } } /** * Hoists a compiler input to externs if it contains the @externs annotation. * Return whether or not the given input was hoisted. */ private boolean hoistIfExtern(CompilerInput input) { Node n = input.getAstRoot(this); // Inputs can have a null AST on a parse error. if (n == null) { return false; } JSDocInfo info = n.getJSDocInfo(); if (info != null && info.isExterns()) { // If the input file is explicitly marked as an externs file, then // assume the programmer made a mistake and throw it into // the externs pile anyways. externsRoot.addChildToBack(n); input.setIsExtern(true); input.getModule().remove(input); externs.add(input); return true; } return false; } /** * Hoists inputs with the @nocompile annotation out of the inputs. */ void hoistNoCompileFiles() { boolean staleInputs = false; for (CompilerInput input : inputs) { Node n = input.getAstRoot(this); // Inputs can have a null AST on a parse error. if (n == null) { continue; } JSDocInfo info = n.getJSDocInfo(); if (info != null && info.isNoCompile()) { input.getModule().remove(input); staleInputs = true; } } if (staleInputs) { repartitionInputs(); } } private void repartitionInputs() { fillEmptyModules(modules); rebuildInputsFromModules(); } /** * Transforms JSON files to a module export that closure compiler can process and keeps track of * any "main" entries in package.json files. 
*/ Map<String, String> processJsonInputs(List<CompilerInput> inputsToProcess) { RewriteJsonToModule rewriteJson = new RewriteJsonToModule(this); for (CompilerInput input : inputsToProcess) { if (!input.getSourceFile().getOriginalPath().endsWith(".json")) { continue; } input.setCompiler(this); try { // JSON objects need wrapped in parens to parse properly input.getSourceFile().setCode("(" + input.getSourceFile().getCode() + ")"); } catch (IOException e) { continue; } Node root = input.getAstRoot(this); if (root == null) { continue; } rewriteJson.process(null, root); } return rewriteJson.getPackageJsonMainEntries(); } void processEs6Modules() { processEs6Modules(inputs, false); } void processEs6Modules(List<CompilerInput> inputsToProcess, boolean forceRewrite) { List<CompilerInput> filteredInputs = new ArrayList<>(); for (CompilerInput input : inputsToProcess) { // Only process files that are detected as ES6 modules or forced to be rewritten if (forceRewrite || !options.dependencyOptions.shouldPruneDependencies() || !JsFileParser.isSupported() || (input.getLoadFlags().containsKey("module") && input.getLoadFlags().get("module").equals("es6"))) { filteredInputs.add(input); } } if (options.numParallelThreads > 1) { new PrebuildAst(this, options.numParallelThreads).prebuild(filteredInputs); } for (CompilerInput input : filteredInputs) { input.setCompiler(this); Node root = input.getAstRoot(this); if (root == null) { continue; } new ProcessEs6Modules(this).processFile(root, forceRewrite); } } /** * Transforms AMD and CJS modules to something closure compiler can * process and creates JSModules and the corresponding dependency tree * on the way. 
*/ void processAMDAndCommonJSModules() { for (CompilerInput input : inputs) { input.setCompiler(this); Node root = input.getAstRoot(this); if (root == null) { continue; } if (options.transformAMDToCJSModules) { new TransformAMDToCJSModule(this).process(null, root); } if (options.processCommonJSModules) { ProcessCommonJSModules cjs = new ProcessCommonJSModules(this, true); cjs.process(null, root); } } } public Node parse(SourceFile file) { initCompilerOptionsIfTesting(); addToDebugLog("Parsing: " + file.getName()); return new JsAst(file).getAstRoot(this); } /** * Allow subclasses to override the default CompileOptions object. */ protected CompilerOptions newCompilerOptions() { return new CompilerOptions(); } void initCompilerOptionsIfTesting() { if (options == null) { // initialization for tests that don't initialize the compiler // by the normal mechanisms. initOptions(newCompilerOptions()); } } private int syntheticCodeId = 0; @Override Node parseSyntheticCode(String js) { return parseSyntheticCode(" [synthetic:" + (++syntheticCodeId) + "] ", js); } @Override Node parseSyntheticCode(String fileName, String js) { initCompilerOptionsIfTesting(); SourceFile source = SourceFile.fromCode(fileName, js); addFilesToSourceMap(ImmutableList.of(source)); return parseCodeHelper(source); } @Override @VisibleForTesting Node parseTestCode(String js) { initCompilerOptionsIfTesting(); initBasedOnOptions(); return parseCodeHelper(SourceFile.fromCode("[testcode]", js)); } private Node parseCodeHelper(SourceFile src) { CompilerInput input = new CompilerInput(src); putCompilerInput(input.getInputId(), input); return input.getAstRoot(this); } @Override ErrorReporter getDefaultErrorReporter() { return oldErrorReporter; } //------------------------------------------------------------------------ // Convert back to source code //------------------------------------------------------------------------ /** * Converts the main parse tree back to JS code. 
*/ @Override public String toSource() { return runInCompilerThread(new Callable<String>() { @Override public String call() throws Exception { Tracer tracer = newTracer("toSource"); try { CodeBuilder cb = new CodeBuilder(); if (jsRoot != null) { int i = 0; for (Node scriptNode = jsRoot.getFirstChild(); scriptNode != null; scriptNode = scriptNode.getNext()) { toSource(cb, i++, scriptNode); } } return cb.toString(); } finally { stopTracer(tracer, "toSource"); } } }); } /** * Converts the parse tree for each input back to JS code. */ public String[] toSourceArray() { return runInCompilerThread(new Callable<String[]>() { @Override public String[] call() throws Exception { Tracer tracer = newTracer("toSourceArray"); try { int numInputs = inputs.size(); String[] sources = new String[numInputs]; CodeBuilder cb = new CodeBuilder(); for (int i = 0; i < numInputs; i++) { Node scriptNode = inputs.get(i).getAstRoot(Compiler.this); cb.reset(); toSource(cb, i, scriptNode); sources[i] = cb.toString(); } return sources; } finally { stopTracer(tracer, "toSourceArray"); } } }); } /** * Converts the parse tree for a module back to JS code. */ public String toSource(final JSModule module) { return runInCompilerThread(new Callable<String>() { @Override public String call() throws Exception { List<CompilerInput> inputs = module.getInputs(); int numInputs = inputs.size(); if (numInputs == 0) { return ""; } CodeBuilder cb = new CodeBuilder(); for (int i = 0; i < numInputs; i++) { Node scriptNode = inputs.get(i).getAstRoot(Compiler.this); if (scriptNode == null) { throw new IllegalArgumentException( "Bad module: " + module.getName()); } toSource(cb, i, scriptNode); } return cb.toString(); } }); } /** * Converts the parse tree for each input in a module back to JS code. 
*/ public String[] toSourceArray(final JSModule module) { return runInCompilerThread(new Callable<String[]>() { @Override public String[] call() throws Exception { List<CompilerInput> inputs = module.getInputs(); int numInputs = inputs.size(); if (numInputs == 0) { return new String[0]; } String[] sources = new String[numInputs]; CodeBuilder cb = new CodeBuilder(); for (int i = 0; i < numInputs; i++) { Node scriptNode = inputs.get(i).getAstRoot(Compiler.this); if (scriptNode == null) { throw new IllegalArgumentException( "Bad module input: " + inputs.get(i).getName()); } cb.reset(); toSource(cb, i, scriptNode); sources[i] = cb.toString(); } return sources; } }); } /** * Writes out JS code from a root node. If printing input delimiters, this * method will attach a comment to the start of the text indicating which * input the output derived from. If there were any preserve annotations * within the root's source, they will also be printed in a block comment * at the beginning of the output. 
*/ public void toSource(final CodeBuilder cb, final int inputSeqNum, final Node root) { runInCompilerThread( new Callable<Void>() { @Override public Void call() throws Exception { if (options.printInputDelimiter) { if ((cb.getLength() > 0) && !cb.endsWith("\n")) { cb.append("\n"); // Make sure that the label starts on a new line } checkState(root.isScript()); String delimiter = options.inputDelimiter; String inputName = root.getInputId().getIdName(); String sourceName = root.getSourceFileName(); checkState(sourceName != null); checkState(!sourceName.isEmpty()); delimiter = delimiter .replaceAll("%name%", Matcher.quoteReplacement(inputName)) .replaceAll("%num%", String.valueOf(inputSeqNum)); cb.append(delimiter).append("\n"); } if (root.getJSDocInfo() != null) { String license = root.getJSDocInfo().getLicense(); if (license != null && cb.addLicense(license)) { cb.append("/*\n").append(license).append("*/\n"); } } // If there is a valid source map, then indicate to it that the current // root node's mappings are offset by the given string builder buffer. if (options.sourceMapOutputPath != null) { sourceMap.setStartingPosition(cb.getLineIndex(), cb.getColumnIndex()); } // if LanguageMode is strict, only print 'use strict' // for the first input file String code = toSource(root, sourceMap, inputSeqNum == 0); if (!code.isEmpty()) { cb.append(code); // In order to avoid parse ambiguity when files are concatenated // together, all files should end in a semi-colon. Do a quick // heuristic check if there's an obvious semi-colon already there. int length = code.length(); char lastChar = code.charAt(length - 1); char secondLastChar = length >= 2 ? code.charAt(length - 2) : '\0'; boolean hasSemiColon = lastChar == ';' || (lastChar == '\n' && secondLastChar == ';'); if (!hasSemiColon) { cb.append(";"); } } return null; } }); } /** * Generates JavaScript source code for an AST, doesn't generate source * map info. 
*/ @Override public String toSource(Node n) { initCompilerOptionsIfTesting(); return toSource(n, null, true); } /** * Generates JavaScript source code for an AST. */ private String toSource(Node n, SourceMap sourceMap, boolean firstOutput) { CodePrinter.Builder builder = new CodePrinter.Builder(n); builder.setTypeRegistry(this.typeRegistry); builder.setCompilerOptions(options); builder.setSourceMap(sourceMap); builder.setTagAsExterns(firstOutput && options.shouldGenerateTypedExterns()); builder.setTagAsStrict(firstOutput && shouldEmitUseStrict()); return builder.build(); } private boolean shouldEmitUseStrict() { switch (options.getLanguageOut()) { case ECMASCRIPT3: case ECMASCRIPT5: case ECMASCRIPT6: return false; default: return options.isEmitUseStrict(); } } /** * Stores a buffer of text to which more can be appended. This is just like a * StringBuilder except that we also track the number of lines. */ public static class CodeBuilder { private final StringBuilder sb = new StringBuilder(); private int lineCount = 0; private int colCount = 0; private final Set<String> uniqueLicenses = new HashSet<>(); /** Removes all text, but leaves the line count unchanged. */ void reset() { sb.setLength(0); } /** Appends the given string to the text buffer. */ CodeBuilder append(String str) { sb.append(str); // Adjust the line and column information for the new text. int index = -1; int lastIndex = index; while ((index = str.indexOf('\n', index + 1)) >= 0) { ++lineCount; lastIndex = index; } if (lastIndex == -1) { // No new lines, append the new characters added. colCount += str.length(); } else { colCount = str.length() - (lastIndex + 1); } return this; } /** Returns all text in the text buffer. */ @Override public String toString() { return sb.toString(); } /** Returns the length of the text buffer. */ public int getLength() { return sb.length(); } /** Returns the (zero-based) index of the last line in the text buffer. 
*/ int getLineIndex() { return lineCount; } /** Returns the (zero-based) index of the last column in the text buffer. */ int getColumnIndex() { return colCount; } /** Determines whether the text ends with the given suffix. */ boolean endsWith(String suffix) { return (sb.length() > suffix.length()) && suffix.equals(sb.substring(sb.length() - suffix.length())); } /** Adds a license and returns whether it is unique (has yet to be encountered). */ boolean addLicense(String license) { return uniqueLicenses.add(license); } } //------------------------------------------------------------------------ // Optimizations //------------------------------------------------------------------------ void performOptimizations() { checkState(options.shouldOptimize()); List<PassFactory> optimizations = getPassConfig().getOptimizations(); if (optimizations.isEmpty()) { return; } phaseOptimizer = createPhaseOptimizer(); phaseOptimizer.consume(optimizations); phaseOptimizer.process(externsRoot, jsRoot); phaseOptimizer = null; } @Override void setCssRenamingMap(CssRenamingMap map) { options.cssRenamingMap = map; } @Override CssRenamingMap getCssRenamingMap() { return options.cssRenamingMap; } /** Control Flow Analysis. 
*/ ControlFlowGraph<Node> computeCFG() { logger.fine("Computing Control Flow Graph"); Tracer tracer = newTracer("computeCFG"); ControlFlowAnalysis cfa = new ControlFlowAnalysis(this, true, false); process(cfa); stopTracer(tracer, "computeCFG"); return cfa.getCfg(); } @Override void prepareAst(Node root) { CompilerPass pass = new PrepareAst(this); pass.process(null, root); } void recordFunctionInformation() { logger.fine("Recording function information"); startPass("recordFunctionInformation"); RecordFunctionInformation recordFunctionInfoPass = new RecordFunctionInformation( this, getPassConfig().getIntermediateState().functionNames); process(recordFunctionInfoPass); functionInformationMap = recordFunctionInfoPass.getMap(); endPass("recordFunctionInformation"); } protected final RecentChange recentChange = new RecentChange(); private final List<CodeChangeHandler> codeChangeHandlers = new ArrayList<>(); /** Name of the synthetic input that holds synthesized externs. */ static final String SYNTHETIC_EXTERNS = "{SyntheticVarsDeclar}"; /** * Name of the synthetic input that holds synthesized externs which * must be at the end of the externs AST. */ static final String SYNTHETIC_EXTERNS_AT_END = "{SyntheticVarsAtEnd}"; private CompilerInput synthesizedExternsInput = null; private CompilerInput synthesizedExternsInputAtEnd = null; private ImmutableMap<String, Node> defaultDefineValues = ImmutableMap.of(); @Override void addChangeHandler(CodeChangeHandler handler) { codeChangeHandlers.add(handler); } @Override void removeChangeHandler(CodeChangeHandler handler) { codeChangeHandlers.remove(handler); } Node getExternsRoot() { return externsRoot; } @Override Node getJsRoot() { return jsRoot; } /** * Some tests don't want to call the compiler "wholesale," they may not want * to call check and/or optimize. With this method, tests can execute custom * optimization loops. 
@VisibleForTesting
  void setPhaseOptimizer(PhaseOptimizer po) {
    this.phaseOptimizer = po;
  }

  @Override
  public int getChangeStamp() {
    return changeStamp;
  }

  @Override
  public void incrementChangeStamp() {
    changeStamp++;
  }

  @Override
  void setChangeScope(Node newChangeScopeRoot) {
    currentChangeScope = newChangeScopeRoot;
  }

  /**
   * Returns the change-scope root (a SCRIPT or FUNCTION node) that a change involving
   * {@code n} should be recorded against.
   */
  private Node getChangeScopeForNode(Node n) {
    /**
     * Compiler change reporting usually occurs after the AST change has already occurred. In the
     * case of node removals those nodes are already removed from the tree and so have no parent
     * chain to walk. In these situations changes are reported instead against what (used to be)
     * their parent. If that parent is itself a script node then it's important to be able to
     * recognize it as the enclosing scope without first stepping to its parent as well.
     */
    if (n.isScript()) {
      return n;
    }

    // Keep the lookup result in its own local: the original code reassigned 'n' and then
    // interpolated it into the error message, so the message always printed "null" instead
    // of naming the node that lacked an enclosing scope.
    Node changeScope = NodeUtil.getEnclosingChangeScopeRoot(n.getParent());
    if (changeScope == null) {
      throw new IllegalStateException(
          "An enclosing scope is required for change reports but node " + n
              + " doesn't have one.");
    }
    return changeScope;
  }

  private void recordChange(Node n) {
    n.setChangeTime(changeStamp);
    // Every code change happens at a different time
    changeStamp++;
  }

  @Override
  boolean hasScopeChanged(Node n) {
    // Without a phase optimizer there is no change tracking, so conservatively assume changed.
    if (phaseOptimizer == null) {
      return true;
    }
    return phaseOptimizer.hasScopeChanged(n);
  }

  /**
   * @deprecated
   * Use #reportChangeToEnclosingScope or NodeTraversal#reportCodeChange instead
   */
  @Deprecated
  @Override
  public void reportCodeChange() {
    // TODO(johnlenz): if this is called with a null scope we need to invalidate everything
    // but this isn't done, so we need to make this illegal or record this as having
    // invalidated everything.
if (currentChangeScope != null) { checkState(currentChangeScope.isScript() || currentChangeScope.isFunction()); recordChange(currentChangeScope); } notifyChangeHandlers(); } @Override public void reportChangeToChangeScope(Node changeScopeRoot) { checkState(changeScopeRoot.isScript() || changeScopeRoot.isFunction()); recordChange(changeScopeRoot); notifyChangeHandlers(); } @Override void reportChangeToEnclosingScope(Node n) { recordChange(getChangeScopeForNode(n)); notifyChangeHandlers(); } private void notifyChangeHandlers() { for (CodeChangeHandler handler : codeChangeHandlers) { handler.reportChange(); } } @Override public CodingConvention getCodingConvention() { CodingConvention convention = options.getCodingConvention(); convention = convention != null ? convention : defaultCodingConvention; return convention; } private Config.LanguageMode getParserConfigLanguageMode( CompilerOptions.LanguageMode languageMode) { switch (languageMode) { case ECMASCRIPT3: return Config.LanguageMode.ECMASCRIPT3; case ECMASCRIPT5: case ECMASCRIPT5_STRICT: return Config.LanguageMode.ECMASCRIPT5; case ECMASCRIPT6: case ECMASCRIPT6_STRICT: case ECMASCRIPT_2015: return Config.LanguageMode.ECMASCRIPT6; case ECMASCRIPT6_TYPED: return Config.LanguageMode.TYPESCRIPT; case ECMASCRIPT7: case ECMASCRIPT_2016: return Config.LanguageMode.ECMASCRIPT7; case ECMASCRIPT8: case ECMASCRIPT_2017: case ECMASCRIPT_NEXT: return Config.LanguageMode.ECMASCRIPT8; default: throw new IllegalStateException("Unexpected language mode: " + options.getLanguageIn()); } } @Override Config getParserConfig(ConfigContext context) { if (parserConfig == null || externsParserConfig == null) { synchronized (this) { if (parserConfig == null) { Config.LanguageMode configLanguageMode = getParserConfigLanguageMode( options.getLanguageIn()); Config.StrictMode strictMode = expectStrictModeInput() ? 
Config.StrictMode.STRICT : Config.StrictMode.SLOPPY; parserConfig = createConfig(configLanguageMode, strictMode); // Externs must always be parsed with at least ES5 language mode. externsParserConfig = configLanguageMode.equals(Config.LanguageMode.ECMASCRIPT3) ? createConfig(Config.LanguageMode.ECMASCRIPT5, strictMode) : parserConfig; } } } switch (context) { case EXTERNS: return externsParserConfig; default: return parserConfig; } } protected Config createConfig(Config.LanguageMode mode, Config.StrictMode strictMode) { Config config = ParserRunner.createConfig( mode, options.isParseJsDocDocumentation(), options.canContinueAfterErrors() ? Config.RunMode.KEEP_GOING : Config.RunMode.STOP_AFTER_ERROR, options.extraAnnotationNames, options.parseInlineSourceMaps, strictMode); return config; } //------------------------------------------------------------------------ // Error reporting //------------------------------------------------------------------------ /** * The warning classes that are available from the command-line, and * are suppressible by the {@code @suppress} annotation. */ protected DiagnosticGroups getDiagnosticGroups() { return new DiagnosticGroups(); } @Override public void report(JSError error) { CheckLevel level = error.getDefaultLevel(); if (warningsGuard != null) { CheckLevel newLevel = warningsGuard.level(error); if (newLevel != null) { level = newLevel; } } if (level.isOn()) { initCompilerOptionsIfTesting(); if (getOptions().errorHandler != null) { getOptions().errorHandler.report(level, error); } errorManager.report(level, error); } } @Override public void report(CheckLevel ignoredLevel, JSError error) { report(error); } @Override public CheckLevel getErrorLevel(JSError error) { checkNotNull(options); return warningsGuard.level(error); } /** * Report an internal error. 
*/
  @Override
  void throwInternalError(String message, Exception cause) {
    String finalMessage =
      "INTERNAL COMPILER ERROR.\n" +
      "Please report this problem.\n\n" + message;

    RuntimeException e = new RuntimeException(finalMessage, cause);
    if (cause != null) {
      // Re-use the cause's stack trace so the report points at the original failure site.
      e.setStackTrace(cause.getStackTrace());
    }
    throw e;
  }

  /**
   * Gets the number of errors.
   */
  public int getErrorCount() {
    return errorManager.getErrorCount();
  }

  /**
   * Gets the number of warnings.
   */
  public int getWarningCount() {
    return errorManager.getWarningCount();
  }

  @Override
  boolean hasHaltingErrors() {
    return !getOptions().canContinueAfterErrors() && getErrorCount() > 0;
  }

  /**
   * Consults the {@link ErrorManager} to see if we've encountered errors
   * that should halt compilation. <p>
   *
   * If {@link CompilerOptions#canContinueAfterErrors} is {@code true}, this function
   * always returns {@code false} without consulting the error manager. The
   * error manager will continue to be told about new errors and warnings, but
   * the compiler will complete compilation of all inputs.<p>
   */
  public boolean hasErrors() {
    return hasHaltingErrors();
  }

  /** Called from the compiler passes, adds debug info */
  @Override
  void addToDebugLog(String str) {
    if (options.useDebugLog) {
      debugLog.append(str);
      debugLog.append('\n');
      logger.fine(str);
    }
  }

  // Resolves a source name to its SourceFile: regular compiler inputs are checked first,
  // then sources recovered via input source maps; returns null when nothing matches.
  @Override
  SourceFile getSourceFileByName(String sourceName) {
    // Here we assume that the source name is the input name, this
    // is true of JavaScript parsed from source.
    if (sourceName != null) {
      CompilerInput input = inputsById.get(new InputId(sourceName));
      if (input != null) {
        return input.getSourceFile();
      }
      // Alternatively, the sourceName might have been reverse-mapped by
      // an input source-map, so let's look in our sourcemap original sources.
return sourceMapOriginalSources.get(sourceName); } return null; } public CharSequence getSourceFileContentByName(String sourceName) { SourceFile file = getSourceFileByName(sourceName); checkNotNull(file); try { return file.getCode(); } catch (IOException e) { return null; } } @Override public void addInputSourceMap(String sourceFileName, SourceMapInput inputSourceMap) { inputSourceMaps.put(sourceFileName, inputSourceMap); } @Override public OriginalMapping getSourceMapping(String sourceName, int lineNumber, int columnNumber) { if (sourceName == null) { return null; } SourceMapInput sourceMap = inputSourceMaps.get(sourceName); if (sourceMap == null) { return null; } // JSCompiler uses 1-indexing for lineNumber and 0-indexing for // columnNumber. // SourceMap uses 1-indexing for both. OriginalMapping result = sourceMap.getSourceMap() .getMappingForLine(lineNumber, columnNumber + 1); if (result == null) { return null; } // The sourcemap will return a path relative to the sourcemap's file. // Translate it to one relative to our base directory. 
String path = getRelativeTo(result.getOriginalFile(), sourceMap.getOriginalPath()); sourceMapOriginalSources.putIfAbsent(path, originalSourcesLoader.loadSource(path)); return result.toBuilder() .setOriginalFile(path) .setColumnPosition(result.getColumnPosition() - 1) .build(); } @Override public String getSourceLine(String sourceName, int lineNumber) { if (lineNumber < 1) { return null; } SourceFile input = getSourceFileByName(sourceName); if (input != null) { return input.getLine(lineNumber); } return null; } @Override public Region getSourceRegion(String sourceName, int lineNumber) { if (lineNumber < 1) { return null; } SourceFile input = getSourceFileByName(sourceName); if (input != null) { return input.getRegion(lineNumber); } return null; } //------------------------------------------------------------------------ // Package-private helpers //------------------------------------------------------------------------ @Override Node getNodeForCodeInsertion(JSModule module) { if (module == null) { if (inputs.isEmpty()) { throw new IllegalStateException("No inputs"); } return inputs.get(0).getAstRoot(this); } List<CompilerInput> moduleInputs = module.getInputs(); if (!moduleInputs.isEmpty()) { return moduleInputs.get(0).getAstRoot(this); } throw new IllegalStateException("Root module has no inputs"); } public SourceMap getSourceMap() { return sourceMap; } VariableMap getVariableMap() { return getPassConfig().getIntermediateState().variableMap; } VariableMap getPropertyMap() { return getPassConfig().getIntermediateState().propertyMap; } VariableMap getStringMap() { return getPassConfig().getIntermediateState().stringMap; } @Override CompilerOptions getOptions() { return options; } FunctionInformationMap getFunctionalInformationMap() { return functionInformationMap; } /** * Sets the logging level for the com.google.javascript.jscomp package. 
*/ public static void setLoggingLevel(Level level) { logger.setLevel(level); } /** Gets the DOT graph of the AST generated at the end of compilation. */ public String getAstDotGraph() throws IOException { if (jsRoot != null) { ControlFlowAnalysis cfa = new ControlFlowAnalysis(this, true, false); cfa.process(null, jsRoot); return DotFormatter.toDot(jsRoot, cfa.getCfg()); } else { return ""; } } @Override public ErrorManager getErrorManager() { if (options == null) { initOptions(new CompilerOptions()); } return errorManager; } @Override List<CompilerInput> getInputsInOrder() { return Collections.unmodifiableList(inputs); } /** * Returns an unmodifiable view of the compiler inputs indexed by id. */ public Map<InputId, CompilerInput> getInputsById() { return Collections.unmodifiableMap(inputsById); } /** * Gets the externs in the order in which they are being processed. */ List<CompilerInput> getExternsInOrder() { return Collections.unmodifiableList(externs); } @VisibleForTesting List<CompilerInput> getInputsForTesting() { return inputs; } @VisibleForTesting List<CompilerInput> getExternsForTesting() { return externs; } @Override boolean hasRegExpGlobalReferences() { return hasRegExpGlobalReferences; } @Override void setHasRegExpGlobalReferences(boolean references) { hasRegExpGlobalReferences = references; } @Override void updateGlobalVarReferences(Map<Var, ReferenceCollection> refMapPatch, Node collectionRoot) { checkState(collectionRoot.isScript() || collectionRoot.isRoot()); if (globalRefMap == null) { globalRefMap = new GlobalVarReferenceMap(getInputsInOrder(), getExternsInOrder()); } globalRefMap.updateGlobalVarReferences(refMapPatch, collectionRoot); } @Override GlobalVarReferenceMap getGlobalVarReferences() { return globalRefMap; } @Override CompilerInput getSynthesizedExternsInput() { if (synthesizedExternsInput == null) { synthesizedExternsInput = newExternInput(SYNTHETIC_EXTERNS, SyntheticExternsPosition.START); } return synthesizedExternsInput; } @Override 
/**
 * Lazily creates and returns the synthetic externs input that is appended at the END of the
 * externs list (as opposed to the default synthetic externs, which go at the front).
 */
CompilerInput getSynthesizedExternsInputAtEnd() {
  if (synthesizedExternsInputAtEnd == null) {
    synthesizedExternsInputAtEnd = newExternInput(
        SYNTHETIC_EXTERNS_AT_END, SyntheticExternsPosition.END);
  }
  return synthesizedExternsInputAtEnd;
}

/** Returns the current compile progress, a value in [0.0, 1.0]. */
@Override
public double getProgress() {
  return progress;
}

/** Returns the name of the pass most recently reported via {@link #setProgress}. */
@Override
String getLastPassName() {
  return lastPassName;
}

/**
 * Records compile progress and the name of the pass that produced it.
 * Values above 1.0 are clamped to 1.0; note that values below 0.0 are NOT clamped here.
 */
@Override
void setProgress(double newProgress, String passName) {
  this.lastPassName = passName;
  if (newProgress > 1.0) {
    progress = 1.0;
  } else {
    progress = newProgress;
  }
}

/** Stores the set of property names found in the externs (computed by a checks pass). */
@Override
void setExternProperties(Set<String> externProperties) {
  this.externProperties = externProperties;
}

/** Returns the extern property names recorded by {@link #setExternProperties}, or null. */
@Override
Set<String> getExternProperties() {
  return externProperties;
}

/**
 * Replaces one file in a hot-swap mode. The given JsAst should be made
 * from a new version of a file that was already present in the last compile
 * call. If the file is new, this call is silently ignored.
 *
 * @param ast the ast of the file that is being replaced
 */
public void replaceScript(JsAst ast) {
  CompilerInput input = this.getInput(ast.getInputId());
  // replaceIncrementalSourceAst returns false when the file is not a known input;
  // in that case this call is a no-op, per the contract above.
  if (!replaceIncrementalSourceAst(ast)) {
    return;
  }
  Node originalRoot = input.getAstRoot(this);
  processNewScript(ast, originalRoot);
}

/**
 * Adds a new Script AST to the compile state. If a script for the same file
 * already exists the script will not be added; use {@link #replaceScript} instead.
 *
 * @param ast the ast of the new file
 */
public void addNewScript(JsAst ast) {
  if (!addNewSourceAst(ast)) {
    return;
  }
  // Hot-swap passes expect an "original" root to diff against; for a brand-new
  // file, synthesize an empty SCRIPT node carrying the new file's input id.
  Node emptyScript = new Node(Token.SCRIPT);
  InputId inputId = ast.getInputId();
  emptyScript.setInputId(inputId);
  emptyScript.setStaticSourceFile(
      SourceFile.fromCode(inputId.getIdName(), ""));
  processNewScript(ast, emptyScript);
}

/**
 * Runs the hot-swap pass pipeline over a replaced (or newly added) script.
 *
 * @param ast          the new version of the script
 * @param originalRoot the previous AST root for the same input (an empty
 *                     SCRIPT node when the file is new)
 */
private void processNewScript(JsAst ast, Node originalRoot) {
  languageMode = options.getLanguageIn();
  Node js = ast.getAstRoot(this);
  checkNotNull(js);
  // First undo the effects of earlier check passes on the old version of the file...
  runHotSwap(originalRoot, js, this.getCleanupPassConfig());
  // NOTE: If hot swap passes that use GlobalNamespace are added, we will need
  // to revisit this approach to clearing GlobalNamespaces
  runHotSwapPass(null, null, ensureDefaultPassConfig().garbageCollectChecks);
  this.getTypeRegistry().clearNamedTypes();
  this.removeSyntheticVarsInput();
  // ...then re-run the default checks over the new version.
  runHotSwap(originalRoot, js, this.ensureDefaultPassConfig());
}

/**
 * Execute the passes from a PassConfig instance over a single replaced file.
 */
private void runHotSwap(
    Node originalRoot, Node js, PassConfig passConfig) {
  for (PassFactory passFactory : passConfig.getChecks()) {
    runHotSwapPass(originalRoot, js, passFactory);
  }
}

/**
 * Runs a single pass in hot-swap mode, if the factory supports hot-swapping
 * (factories that return null from getHotSwapPass are skipped).
 */
private void runHotSwapPass(
    Node originalRoot, Node js, PassFactory passFactory) {
  HotSwapCompilerPass pass = passFactory.getHotSwapPass(this);
  if (pass != null) {
    if (logger.isLoggable(Level.INFO)) {
      logger.info("Performing HotSwap for pass " + passFactory.getName());
    }
    pass.hotSwapScript(js, originalRoot);
  }
}

/** Returns the pass config used to undo check-pass effects during hot swap. */
private PassConfig getCleanupPassConfig() {
  return new CleanupPasses(getOptions());
}

/** Removes the synthetic-externs input so it can be rebuilt for the new script. */
private void removeSyntheticVarsInput() {
  String sourceName = Compiler.SYNTHETIC_EXTERNS;
  removeExternInput(new InputId(sourceName));
}

/**
 * Injects a compile-time runtime library (a JS resource bundled with the compiler)
 * into the AST, at most once per resource name.
 *
 * <p>Resources may begin with directive string literals: 'use ...' (ignored),
 * 'require lib' (recursively injects a dependency first), and 'declare name'
 * (adds an untyped name to the end-of-externs input). Any other directive is an error.
 *
 * @param resourceName name of the resource under js/ (without the .js extension)
 * @param force        when true, inject even if library injection is disabled by options
 * @return the last node of the injected library, after which future libraries are inserted;
 *     when nothing is injected, the last previously injected library node (possibly null)
 */
@Override
Node ensureLibraryInjected(String resourceName, boolean force) {
  boolean doNotInject =
      !force && (options.skipNonTranspilationPasses || options.preventLibraryInjection);
  if (injectedLibraries.containsKey(resourceName) || doNotInject) {
    return lastInjectedLibrary;
  }

  // Load/parse the code.
  String originalCode = ResourceLoader.loadTextResource(
      Compiler.class, "js/" + resourceName + ".js");
  Node ast = parseSyntheticCode(
      " [synthetic:" + resourceName + "] ", originalCode);

  // Look for string literals of the form 'require foo bar' or 'externs baz' or 'normalize'.
  // As we process each one, remove it from its parent.
  // NOTE: the loop re-reads ast.getFirstChild() each iteration because the
  // current node is removed from the AST at the bottom of the loop body.
  for (Node node = ast.getFirstChild();
      node != null && node.isExprResult() && node.getFirstChild().isString();
      node = ast.getFirstChild()) {
    String directive = node.getFirstChild().getString();
    List<String> words = Splitter.on(' ').limit(2).splitToList(directive);
    switch (words.get(0)) {
      case "use":
        // 'use strict' is ignored (and deleted).
        break;
      case "require":
        // 'require lib'; pulls in the named library before this one.
        ensureLibraryInjected(words.get(1), force);
        break;
      case "declare":
        // 'declare name'; adds the name to the externs (with no type information).
        // Note that we could simply add the entire externs library, but that leads to
        // potentially-surprising behavior when the externs that are present depend on
        // whether or not a polyfill is used.
        Node var = IR.var(IR.name(words.get(1)));
        JSDocInfoBuilder jsdoc = new JSDocInfoBuilder(false);
        // Suppress duplicate-var warning in case this name is already defined in the externs.
        jsdoc.addSuppression("duplicate");
        var.setJSDocInfo(jsdoc.build());
        getSynthesizedExternsInputAtEnd()
            .getAstRoot(this)
            .addChildToBack(var);
        break;
      default:
        throw new RuntimeException("Bad directive: " + directive);
    }
    ast.removeChild(node);
  }

  // If we've already started optimizations, then we need to normalize this.
  if (getLifeCycleStage().isNormalized()) {
    Normalize.normalizeSyntheticCode(this, ast, "jscomp_" + resourceName + "_");
  }

  // Insert the code immediately after the last-inserted runtime library.
  Node lastChild = ast.getLastChild();
  for (Node child = ast.getFirstChild(); child != null; child = child.getNext()) {
    NodeUtil.markNewScopesChanged(child, this);
  }
  Node firstChild = ast.removeChildren();
  if (firstChild == null) {
    // Handle require-only libraries.
    return lastInjectedLibrary;
  }
  Node parent = getNodeForCodeInsertion(null);
  if (lastInjectedLibrary == null) {
    parent.addChildrenToFront(firstChild);
  } else {
    parent.addChildrenAfter(firstChild, lastInjectedLibrary);
  }
  lastInjectedLibrary = lastChild;
  injectedLibraries.put(resourceName, lastChild);

  reportChangeToEnclosingScope(parent);
  return lastChild;
}

/** Returns the compiler version baked into the jar. */
@GwtIncompatible("java.util.ResourceBundle")
public static String getReleaseVersion() {
  ResourceBundle config = ResourceBundle.getBundle(CONFIG_RESOURCE);
  return config.getString("compiler.version");
}

/** Returns the compiler date baked into the jar. */
@GwtIncompatible("java.util.ResourceBundle")
public static String getReleaseDate() {
  ResourceBundle config = ResourceBundle.getBundle(CONFIG_RESOURCE);
  return config.getString("compiler.date");
}

/**
 * Records the comments parsed from one file; only valid when detailed source
 * info is being preserved (IDE mode).
 *
 * @throws UnsupportedOperationException when not in IDE mode
 */
@Override
void addComments(String filename, List<Comment> comments) {
  if (!getOptions().preservesDetailedSourceInfo()) {
    throw new UnsupportedOperationException(
        "addComments may only be called in IDE mode.");
  }
  commentsPerFile.put(filename, comments);
}

/**
 * Returns the comments recorded for the given file (may be null when none were
 * recorded); only valid in IDE mode.
 *
 * @throws UnsupportedOperationException when not in IDE mode
 */
@Override
public List<Comment> getComments(String filename) {
  if (!getOptions().preservesDetailedSourceInfo()) {
    throw new UnsupportedOperationException(
        "getComments may only be called in IDE mode.");
  }
  return commentsPerFile.get(filename);
}

/** Stores the default values for @define'd names. */
@Override
void setDefaultDefineValues(ImmutableMap<String, Node> values) {
  this.defaultDefineValues = values;
}

/** Returns the default values for @define'd names. */
@Override
ImmutableMap<String, Node> getDefaultDefineValues() {
  return this.defaultDefineValues;
}

/** Returns the module loader used to resolve paths into module URIs. */
@Override
ModuleLoader getModuleLoader() {
  return moduleLoader;
}

/**
 * Registers the given source files with the source map, but only when the options
 * ask for source contents to be embedded and a source map is actually configured.
 */
private void addFilesToSourceMap(Iterable<? extends SourceFile> files) {
  if (getOptions().sourceMapIncludeSourcesContent && getSourceMap() != null) {
    for (SourceFile file : files) {
      getSourceMap().addSourceFile(file);
    }
  }
}

/**
 * Serializable state of the compiler.
 *
 * <p>NOTE(review): this captures only the fields listed below — e.g. modules and
 * the module graph are NOT serialized; confirm callers rebuild anything else they need.
 */
private static class CompilerState implements Serializable {
  CompilerOptions options;
  Node externsRoot;
  Node jsRoot;
  Node externAndJsRoot;
  List<CompilerInput> externs;
  List<CompilerInput> inputs;
  Map<InputId, CompilerInput> inputsById;
  JSTypeRegistry typeRegistry;

  CompilerState(
      CompilerOptions options,
      Node externsRoot,
      Node jsRoot,
      Node externAndJsRoot,
      List<CompilerInput> externs,
      List<CompilerInput> inputs,
      Map<InputId, CompilerInput> inputsById,
      JSTypeRegistry typeRegistry) {
    this.options = options;
    this.externsRoot = externsRoot;
    this.jsRoot = jsRoot;
    this.externAndJsRoot = externAndJsRoot;
    this.typeRegistry = typeRegistry;
    this.externs = externs;
    this.inputs = inputs;
    this.inputsById = inputsById;
  }
}

/**
 * Serializes the compiler state captured by {@link CompilerState} to the given stream
 * using Java object serialization. The stream is closed by the try-with-resources.
 */
@GwtIncompatible("ObjectOutputStream")
public void saveState(OutputStream outputStream) throws IOException {
  CompilerState compilerState =
      new CompilerState(
          options,
          externsRoot,
          jsRoot,
          externAndJsRoot,
          externs,
          inputs,
          inputsById,
          typeRegistry);
  try (ObjectOutputStream objectOutputStream = new ObjectOutputStream(outputStream)) {
    objectOutputStream.writeObject(compilerState);
  }
}

/**
 * Restores compiler state previously written by {@link #saveState} and
 * re-initializes the warnings guard from the restored options.
 *
 * <p>NOTE(review): deserializes with plain ObjectInputStream — only feed this
 * streams produced by saveState from a trusted source.
 */
@GwtIncompatible("ObjectInputStream")
public void restoreState(InputStream inputStream) throws Exception {
  try (ObjectInputStream objectInputStream = new ObjectInputStream(inputStream)) {
    CompilerState compilerState = (CompilerState) objectInputStream.readObject();
    options = compilerState.options;
    externs = compilerState.externs;
    inputs = compilerState.inputs;
    // inputsById is a final map; repopulate it in place rather than replacing it.
    inputsById.clear();
    inputsById.putAll(compilerState.inputsById);
    typeRegistry = compilerState.typeRegistry;
    externAndJsRoot = compilerState.externAndJsRoot;
    externsRoot = compilerState.externsRoot;
    jsRoot = compilerState.jsRoot;
  }
  initWarningsGuard(options.getWarningsGuard());
}
}
src/com/google/javascript/jscomp/Compiler.java
/* * Copyright 2004 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.common.annotations.GwtIncompatible; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Splitter; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.debugging.sourcemap.proto.Mapping.OriginalMapping; import com.google.javascript.jscomp.CompilerOptions.DevMode; import com.google.javascript.jscomp.CoverageInstrumentationPass.CoverageReach; import com.google.javascript.jscomp.CoverageInstrumentationPass.InstrumentOption; import com.google.javascript.jscomp.WarningsGuard.DiagnosticGroupState; import com.google.javascript.jscomp.deps.JsFileParser; import com.google.javascript.jscomp.deps.ModuleLoader; import com.google.javascript.jscomp.deps.SortedDependencies.MissingProvideException; import com.google.javascript.jscomp.parsing.Config; import com.google.javascript.jscomp.parsing.ParserRunner; import com.google.javascript.jscomp.parsing.parser.trees.Comment; import com.google.javascript.jscomp.type.ChainableReverseAbstractInterpreter; import 
com.google.javascript.jscomp.type.ClosureReverseAbstractInterpreter; import com.google.javascript.jscomp.type.ReverseAbstractInterpreter; import com.google.javascript.jscomp.type.SemanticReverseAbstractInterpreter; import com.google.javascript.rhino.ErrorReporter; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.InputId; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.JSDocInfoBuilder; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.TypeIRegistry; import com.google.javascript.rhino.jstype.JSTypeRegistry; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.PrintStream; import java.io.Serializable; import java.nio.file.FileSystems; import java.util.AbstractSet; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.ResourceBundle; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; /** * Compiler (and the other classes in this package) does the following: * <ul> * <li>parses JS code * <li>checks for undefined variables * <li>performs optimizations such as constant folding and constants inlining * <li>renames variables (to short names) * <li>outputs compact JavaScript code * </ul> * * External variables are declared in 'externs' files. For instance, the file * may include definitions for global javascript/browser objects such as * window, document. 
* */ public class Compiler extends AbstractCompiler implements ErrorHandler, SourceFileMapping { static final String SINGLETON_MODULE_NAME = "$singleton$"; static final DiagnosticType MODULE_DEPENDENCY_ERROR = DiagnosticType.error("JSC_MODULE_DEPENDENCY_ERROR", "Bad dependency: {0} -> {1}. " + "Modules must be listed in dependency order."); static final DiagnosticType MISSING_ENTRY_ERROR = DiagnosticType.error( "JSC_MISSING_ENTRY_ERROR", "required entry point \"{0}\" never provided"); static final DiagnosticType MISSING_MODULE_ERROR = DiagnosticType.error( "JSC_MISSING_ENTRY_ERROR", "unknown module \"{0}\" specified in entry point spec"); // Used in PerformanceTracker static final String READING_PASS_NAME = "readInputs"; static final String PARSING_PASS_NAME = "parseInputs"; static final String PEEPHOLE_PASS_NAME = "peepholeOptimizations"; static final String UNREACHABLE_CODE_ELIM_NAME = "removeUnreachableCode"; private static final String CONFIG_RESOURCE = "com.google.javascript.jscomp.parsing.ParserConfig"; CompilerOptions options = null; private PassConfig passes = null; // The externs inputs private List<CompilerInput> externs; // The JS source modules private List<JSModule> modules; private JSModuleGraph moduleGraph; // The module loader for resolving paths into module URIs. private ModuleLoader moduleLoader; // The JS source inputs private List<CompilerInput> inputs; // error manager to which error management is delegated private ErrorManager errorManager; // Warnings guard for filtering warnings. private WarningsGuard warningsGuard; // Compile-time injected libraries. The node points to the last node of // the library, so code can be inserted after. private final Map<String, Node> injectedLibraries = new LinkedHashMap<>(); // Node of the final injected library. Future libraries will be injected // after this node. 
private Node lastInjectedLibrary; // Parse tree root nodes Node externsRoot; Node jsRoot; Node externAndJsRoot; // Used for debugging; to see the compiled code between passes private String lastJsSource = null; /** @see #getLanguageMode() */ private CompilerOptions.LanguageMode languageMode = CompilerOptions.LanguageMode.ECMASCRIPT3; private final Map<InputId, CompilerInput> inputsById = new ConcurrentHashMap<>(); /** * Subclasses are responsible for loading soures that were not provided as explicit inputs to the * compiler. For example, looking up sources referenced within sourcemaps. */ public static class ExternalSourceLoader { public SourceFile loadSource(String filename) { throw new RuntimeException("Cannot load without a valid loader."); } } private ExternalSourceLoader originalSourcesLoader = new ExternalSourceLoader() { // TODO(tdeegan): The @GwtIncompatible tree needs to be cleaned up. @Override @GwtIncompatible("SourceFile.fromFile") public SourceFile loadSource(String filename) { return SourceFile.fromFile(filename); } }; // Original sources referenced by the source maps. private ConcurrentHashMap<String, SourceFile> sourceMapOriginalSources = new ConcurrentHashMap<>(); /** Configured {@link SourceMapInput}s, plus any source maps discovered in source files. */ private final ConcurrentHashMap<String, SourceMapInput> inputSourceMaps = new ConcurrentHashMap<>(); // Map from filenames to lists of all the comments in each file. private Map<String, List<Comment>> commentsPerFile = new ConcurrentHashMap<>(); /** The source code map */ private SourceMap sourceMap; /** The externs created from the exports. */ private String externExports = null; /** * Ids for function inlining so that each declared name remains * unique. */ private int uniqueNameId = 0; /** * Whether to assume there are references to the RegExp Global object * properties. 
*/ private boolean hasRegExpGlobalReferences = true; /** The function information map */ private FunctionInformationMap functionInformationMap; /** Debugging information */ private final StringBuilder debugLog = new StringBuilder(); /** Detects Google-specific coding conventions. */ CodingConvention defaultCodingConvention = new ClosureCodingConvention(); private JSTypeRegistry typeRegistry; private volatile Config parserConfig = null; private volatile Config externsParserConfig = null; private ReverseAbstractInterpreter abstractInterpreter; private TypeValidator typeValidator; // The compiler can ask phaseOptimizer for things like which pass is currently // running, or which functions have been changed by optimizations private PhaseOptimizer phaseOptimizer = null; public PerformanceTracker tracker; // Used by optimize-returns, optimize-parameters and remove-unused-variables private DefinitionUseSiteFinder defFinder = null; // Types that have been forward declared private Set<String> forwardDeclaredTypes = new HashSet<>(); // For use by the new type inference private GlobalTypeInfo symbolTable; private MostRecentTypechecker mostRecentTypechecker = MostRecentTypechecker.NONE; // This error reporter gets the messages from the current Rhino parser or TypeRegistry. private final ErrorReporter oldErrorReporter = RhinoErrorReporter.forOldRhino(this); /** Error strings used for reporting JSErrors */ public static final DiagnosticType OPTIMIZE_LOOP_ERROR = DiagnosticType.error( "JSC_OPTIMIZE_LOOP_ERROR", "Exceeded max number of optimization iterations: {0}"); public static final DiagnosticType MOTION_ITERATIONS_ERROR = DiagnosticType.error("JSC_OPTIMIZE_LOOP_ERROR", "Exceeded max number of code motion iterations: {0}"); private final CompilerExecutor compilerExecutor = new CompilerExecutor(); /** * Logger for the whole com.google.javascript.jscomp domain - * setting configuration for this logger affects all loggers * in other classes within the compiler. 
*/ public static final Logger logger = Logger.getLogger("com.google.javascript.jscomp"); private final PrintStream outStream; private GlobalVarReferenceMap globalRefMap = null; private volatile double progress = 0.0; private String lastPassName; private Set<String> externProperties = null; private static final Joiner pathJoiner = Joiner.on(File.separator); // TODO(johnlenz): remove "currentScope". // Used as a shortcut for change tracking. This is the current scope being // visited by the "current" NodeTraversal. This can't be thread safe so // we should move it into the NodeTraversal and require explicit changed // nodes elsewhere so we aren't blocked from doing this elsewhere. private Node currentChangeScope = null; // Starts at 0, increases as "interesting" things happen. // Nothing happens at time START_TIME, the first pass starts at time 1. // The correctness of scope-change tracking relies on Node/getIntProp // returning 0 if the custom attribute on a node hasn't been set. private int changeStamp = 1; /** * Creates a Compiler that reports errors and warnings to its logger. */ public Compiler() { this((PrintStream) null); } /** * Creates a Compiler that reports errors and warnings to an output stream. */ public Compiler(PrintStream stream) { addChangeHandler(recentChange); this.outStream = stream; } /** * Creates a Compiler that uses a custom error manager. */ public Compiler(ErrorManager errorManager) { this(); setErrorManager(errorManager); } /** * Sets the error manager. * * @param errorManager the error manager, it cannot be {@code null} */ public void setErrorManager(ErrorManager errorManager) { checkNotNull(errorManager, "the error manager cannot be null"); this.errorManager = new ThreadSafeDelegatingErrorManager(errorManager); } /** * Creates a message formatter instance corresponding to the value of * {@link CompilerOptions}. 
*/ private MessageFormatter createMessageFormatter() { boolean colorize = options.shouldColorizeErrorOutput(); return options.errorFormat.toFormatter(this, colorize); } @VisibleForTesting void setOriginalSourcesLoader(ExternalSourceLoader originalSourcesLoader) { this.originalSourcesLoader = originalSourcesLoader; } /** * Initializes the compiler options. It's called as part of a normal compile() job. * Public for the callers that are not doing a normal compile() job. */ public void initOptions(CompilerOptions options) { this.options = options; this.languageMode = options.getLanguageIn(); if (errorManager == null) { if (this.outStream == null) { setErrorManager( new LoggerErrorManager(createMessageFormatter(), logger)); } else { PrintStreamErrorManager printer = new PrintStreamErrorManager(createMessageFormatter(), this.outStream); printer.setSummaryDetailLevel(options.summaryDetailLevel); setErrorManager(printer); } } reconcileOptionsWithGuards(); // TODO(johnlenz): generally, the compiler should not be changing the options object // provided by the user. This should be handled a different way. // Turn off type-based optimizations when type checking is off if (!options.checkTypes) { options.setDisambiguateProperties(false); options.setAmbiguateProperties(false); options.setInlineProperties(false); options.setUseTypesForLocalOptimization(false); options.setUseTypesForOptimization(false); } if (options.legacyCodeCompile) { options.setDisambiguateProperties(false); options.setAmbiguateProperties(false); options.useNonStrictWarningsGuard(); } if (options.assumeForwardDeclaredForMissingTypes) { this.forwardDeclaredTypes = new AbstractSet<String>() { @Override public boolean contains(Object o) { return true; // Report all types as forward declared types. 
} @Override public boolean add(String e) { return false; } @Override public Iterator<String> iterator() { return Collections.<String>emptySet().iterator(); } @Override public int size() { return 0; } }; } initWarningsGuard(options.getWarningsGuard()); } public void printConfig(PrintStream printStream) { printStream.println("==== CompilerOptions ===="); printStream.println(options.toString()); printStream.println("==== WarningsGuard ===="); printStream.println(warningsGuard.toString()); } void initWarningsGuard(WarningsGuard warningsGuard) { this.warningsGuard = new ComposeWarningsGuard( new SuppressDocWarningsGuard(getDiagnosticGroups().getRegisteredGroups()), warningsGuard); } /** * When the CompilerOptions and its WarningsGuard overlap, reconcile * any discrepencies. */ protected void reconcileOptionsWithGuards() { // DiagnosticGroups override the plain checkTypes option. if (options.enables(DiagnosticGroups.CHECK_TYPES)) { options.checkTypes = true; } else if (options.disables(DiagnosticGroups.CHECK_TYPES)) { options.checkTypes = false; } else if (!options.checkTypes) { // If DiagnosticGroups did not override the plain checkTypes // option, and checkTypes is enabled, then turn off the // parser type warnings. options.setWarningLevel( DiagnosticGroup.forType( RhinoErrorReporter.TYPE_PARSE_ERROR), CheckLevel.OFF); } DiagnosticGroupState ntiState = options.getWarningsGuard().enablesExplicitly(DiagnosticGroups.NEW_CHECK_TYPES); if (ntiState == DiagnosticGroupState.ON) { options.setNewTypeInference(true); } else if (ntiState == DiagnosticGroupState.OFF) { options.setNewTypeInference(false); } // With NTI, we still need OTI to run because the later passes that use // types only understand OTI types at the moment. // But we do not want to see the warnings from OTI. if (options.getNewTypeInference()) { options.checkTypes = true; // Suppress warnings from the const checks of CheckAccessControls so as to avoid // duplication. 
options.setWarningLevel(DiagnosticGroups.ACCESS_CONTROLS_CONST, CheckLevel.OFF); if (!options.reportOTIErrorsUnderNTI) { options.setWarningLevel( DiagnosticGroups.OLD_CHECK_TYPES, CheckLevel.OFF); options.setWarningLevel( DiagnosticGroups.OLD_REPORT_UNKNOWN_TYPES, CheckLevel.OFF); options.setWarningLevel( FunctionTypeBuilder.ALL_DIAGNOSTICS, CheckLevel.OFF); } options.setWarningLevel( DiagnosticGroup.forType(RhinoErrorReporter.TYPE_PARSE_ERROR), CheckLevel.WARNING); } if (options.checkGlobalThisLevel.isOn() && !options.disables(DiagnosticGroups.GLOBAL_THIS)) { options.setWarningLevel( DiagnosticGroups.GLOBAL_THIS, options.checkGlobalThisLevel); } if (expectStrictModeInput()) { options.setWarningLevel( DiagnosticGroups.ES5_STRICT, CheckLevel.ERROR); } // All passes must run the variable check. This synthesizes // variables later so that the compiler doesn't crash. It also // checks the externs file for validity. If you don't want to warn // about missing variable declarations, we shut that specific // error off. if (!options.checkSymbols && !options.enables(DiagnosticGroups.CHECK_VARIABLES)) { options.setWarningLevel( DiagnosticGroups.CHECK_VARIABLES, CheckLevel.OFF); } } private boolean expectStrictModeInput() { switch (options.getLanguageIn()) { case ECMASCRIPT3: case ECMASCRIPT5: case ECMASCRIPT6: return false; case ECMASCRIPT5_STRICT: case ECMASCRIPT6_STRICT: case ECMASCRIPT6_TYPED: return true; default: return options.isStrictModeInput(); } } /** * Initializes the instance state needed for a compile job. 
*/ public <T1 extends SourceFile, T2 extends SourceFile> void init( List<T1> externs, List<T2> inputs, CompilerOptions options) { JSModule module = new JSModule(SINGLETON_MODULE_NAME); for (SourceFile input : inputs) { module.add(input); } List<JSModule> modules = new ArrayList<>(1); modules.add(module); initModules(externs, modules, options); addFilesToSourceMap(inputs); if (options.printConfig) { printConfig(System.err); } } /** * Initializes the instance state needed for a compile job if the sources * are in modules. */ public <T extends SourceFile> void initModules( List<T> externs, List<JSModule> modules, CompilerOptions options) { initOptions(options); checkFirstModule(modules); fillEmptyModules(modules); this.externs = makeCompilerInput(externs, true); // Generate the module graph, and report any errors in the module // specification as errors. this.modules = modules; try { this.moduleGraph = new JSModuleGraph(modules); } catch (JSModuleGraph.ModuleDependenceException e) { // problems with the module format. Report as an error. The // message gives all details. report(JSError.make(MODULE_DEPENDENCY_ERROR, e.getModule().getName(), e.getDependentModule().getName())); return; } this.inputs = getAllInputsFromModules(modules); this.commentsPerFile = new ConcurrentHashMap<>(inputs.size()); initBasedOnOptions(); initInputsByIdMap(); initAST(); } /** * Exists only for some tests that want to reuse JSModules. * @deprecated Fix those tests. */ @Deprecated public void breakThisCompilerSoItsModulesCanBeReused() { moduleGraph.breakThisGraphSoItsModulesCanBeReused(); moduleGraph = null; } /** * Do any initialization that is dependent on the compiler options. */ public void initBasedOnOptions() { inputSourceMaps.putAll(options.inputSourceMaps); // Create the source map if necessary. 
if (options.sourceMapOutputPath != null) { sourceMap = options.sourceMapFormat.getInstance(); sourceMap.setPrefixMappings(options.sourceMapLocationMappings); if (options.applyInputSourceMaps) { sourceMap.setSourceFileMapping(this); } } } private <T extends SourceFile> List<CompilerInput> makeCompilerInput( List<T> files, boolean isExtern) { List<CompilerInput> inputs = new ArrayList<>(files.size()); for (T file : files) { inputs.add(new CompilerInput(file, isExtern)); } return inputs; } private static final DiagnosticType EMPTY_MODULE_LIST_ERROR = DiagnosticType.error("JSC_EMPTY_MODULE_LIST_ERROR", "At least one module must be provided"); private static final DiagnosticType EMPTY_ROOT_MODULE_ERROR = DiagnosticType.error("JSC_EMPTY_ROOT_MODULE_ERROR", "Root module ''{0}'' must contain at least one source code input"); /** * Verifies that at least one module has been provided and that the first one * has at least one source code input. */ private void checkFirstModule(List<JSModule> modules) { if (modules.isEmpty()) { report(JSError.make(EMPTY_MODULE_LIST_ERROR)); } else if (modules.get(0).getInputs().isEmpty() && modules.size() > 1) { // The root module may only be empty if there is exactly 1 module. report(JSError.make(EMPTY_ROOT_MODULE_ERROR, modules.get(0).getName())); } } /** * Empty modules get an empty "fill" file, so that we can move code into * an empty module. */ static String createFillFileName(String moduleName) { return moduleName + "$fillFile"; } /** * Creates an OS specific path string from parts */ public static String joinPathParts(String... pathParts) { return pathJoiner.join(pathParts); } /** * Fill any empty modules with a place holder file. It makes any cross module * motion easier. 
*/ private static void fillEmptyModules(List<JSModule> modules) { for (JSModule module : modules) { if (module.getInputs().isEmpty()) { module.add(SourceFile.fromCode( createFillFileName(module.getName()), "")); } } } /** * Rebuilds the internal list of inputs by iterating over all modules. * This is necessary if inputs have been added to or removed from a module * after the {@link #init(List, List, CompilerOptions)} call. */ public void rebuildInputsFromModules() { inputs = getAllInputsFromModules(modules); initInputsByIdMap(); } /** * Builds a single list of all module inputs. Verifies that it contains no * duplicates. */ private static List<CompilerInput> getAllInputsFromModules( List<JSModule> modules) { List<CompilerInput> inputs = new ArrayList<>(); Map<String, JSModule> inputMap = new HashMap<>(); for (JSModule module : modules) { for (CompilerInput input : module.getInputs()) { String inputName = input.getName(); // NOTE(nicksantos): If an input is in more than one module, // it will show up twice in the inputs list, and then we // will get an error down the line. inputs.add(input); inputMap.put(inputName, module); } } return inputs; } static final DiagnosticType DUPLICATE_INPUT = DiagnosticType.error("JSC_DUPLICATE_INPUT", "Duplicate input: {0}"); static final DiagnosticType DUPLICATE_EXTERN_INPUT = DiagnosticType.error("JSC_DUPLICATE_EXTERN_INPUT", "Duplicate extern input: {0}"); /** * Returns the relative path, resolved relative to the base path, where the * base path is interpreted as a filename rather than a directory. E.g.: * getRelativeTo("../foo/bar.js", "baz/bam/qux.js") --> "baz/foo/bar.js" */ private static String getRelativeTo(String relative, String base) { return FileSystems.getDefault().getPath(base) .resolveSibling(relative) .normalize() .toString() .replace(File.separator, "/"); } /** * Creates a map to make looking up an input by name fast. Also checks for * duplicate inputs. 
*/ void initInputsByIdMap() { inputsById.clear(); for (CompilerInput input : externs) { InputId id = input.getInputId(); CompilerInput previous = putCompilerInput(id, input); if (previous != null) { report(JSError.make(DUPLICATE_EXTERN_INPUT, input.getName())); } } for (CompilerInput input : inputs) { InputId id = input.getInputId(); CompilerInput previous = putCompilerInput(id, input); if (previous != null) { report(JSError.make(DUPLICATE_INPUT, input.getName())); } } } /** * Sets up the skeleton of the AST (the externs and root). */ private void initAST() { jsRoot = IR.root(); externsRoot = IR.root(); externAndJsRoot = IR.root(externsRoot, jsRoot); } /** Compiles a single source file and a single externs file. */ public Result compile(SourceFile extern, SourceFile input, CompilerOptions options) { return compile(ImmutableList.of(extern), ImmutableList.of(input), options); } /** * Compiles a list of inputs. * * <p>This is a convenience method to wrap up all the work of compilation, including * generating the error and warning report. * * <p>NOTE: All methods called here must be public, because client code must be able to replicate * and customize this. */ public <T1 extends SourceFile, T2 extends SourceFile> Result compile( List<T1> externs, List<T2> inputs, CompilerOptions options) { // The compile method should only be called once. checkState(jsRoot == null); try { init(externs, inputs, options); if (!hasErrors()) { parseForCompilation(); } if (!hasErrors()) { if (options.getInstrumentForCoverageOnly()) { // TODO(bradfordcsmith): The option to instrument for coverage only should belong to the // runner, not the compiler. instrumentForCoverage(); } else { stage1Passes(); if (!hasErrors()) { stage2Passes(); } } completeCompilation(); } } finally { generateReport(); } return getResult(); } /** * Generates a report of all warnings and errors found during compilation to stderr. 
*
* <p>Client code must call this method explicitly if it doesn't use one of the convenience
* methods that do so automatically.
* <p>Always call this method, even if the compiler throws an exception. The report will include
* information about the exception.
*/
public void generateReport() {
  Tracer t = newTracer("generateReport");
  errorManager.generateReport();
  stopTracer(t, "generateReport");
}

/**
 * Compiles a list of modules.
 *
 * <p>This is a convenience method to wrap up all the work of compilation, including
 * generating the error and warning report.
 *
 * <p>NOTE: All methods called here must be public, because client code must be able to replicate
 * and customize this.
 */
public <T extends SourceFile> Result compileModules(
    List<T> externs, List<JSModule> modules, CompilerOptions options) {
  // The compile method should only be called once.
  checkState(jsRoot == null);

  try {
    initModules(externs, modules, options);
    if (!hasErrors()) {
      parseForCompilation();
    }
    if (!hasErrors()) {
      // TODO(bradfordcsmith): The option to instrument for coverage only should belong to the
      // runner, not the compiler.
      if (options.getInstrumentForCoverageOnly()) {
        instrumentForCoverage();
      } else {
        stage1Passes();
        if (!hasErrors()) {
          stage2Passes();
        }
      }
      completeCompilation();
    }
  } finally {
    // Always emit the error/warning report, even on failure partway through.
    generateReport();
  }
  return getResult();
}

/**
 * Perform compiler passes for stage 1 of compilation.
 *
 * <p>Stage 1 consists primarily of error and type checking passes.
 *
 * <p>{@code parseForCompilation()} must be called before this method is called.
 *
 * <p>The caller is responsible for also calling {@code generateReport()} to generate a report of
 * warnings and errors to stderr. See the invocation in {@link #compile} for a good example.
 */
public void stage1Passes() {
  checkState(
      inputs != null && !inputs.isEmpty(), "No inputs. Did you call init() or initModules()?");
  checkState(!hasErrors());
  checkState(!options.getInstrumentForCoverageOnly());
  runInCompilerThread(
      new Callable<Void>() {
        @Override
        public Void call() throws Exception {
          performChecksAndTranspilation();
          return null;
        }
      });
}

/**
 * Perform compiler passes for stage 2 of compilation.
 *
 * <p>Stage 2 consists primarily of optimization passes.
 *
 * <p>{@code stage1Passes()} must be called before this method is called.
 *
 * <p>The caller is responsible for also calling {@code generateReport()} to generate a report of
 * warnings and errors to stderr. See the invocation in {@link #compile} for a good example.
 */
public void stage2Passes() {
  checkState(
      inputs != null && !inputs.isEmpty(), "No inputs. Did you call init() or initModules()?");
  checkState(!hasErrors());
  checkState(!options.getInstrumentForCoverageOnly());
  runInCompilerThread(
      new Callable<Void>() {
        @Override
        public Void call() throws Exception {
          if (options.shouldOptimize()) {
            performOptimizations();
          }
          return null;
        }
      });
}

/**
 * Disable threads. This is for clients that run on AppEngine and
 * don't have threads.
 */
public void disableThreads() {
  compilerExecutor.disableThreads();
}

/**
 * Sets the timeout when Compiler is run in a thread
 * @param timeout seconds to wait before timeout
 */
public void setTimeout(int timeout) {
  compilerExecutor.setTimeout(timeout);
}

/**
 * The primary purpose of this method is to run the provided code with a larger than standard
 * stack.
 */
<T> T runInCompilerThread(Callable<T> callable) {
  // Tracing is enabled only when options are present and the tracer mode is on.
  return compilerExecutor.runInCompilerThread(
      callable, options != null && options.getTracerMode().isOn());
}

// Dispatches to either the whitespace-only/transpile-only pipeline or the full check pipeline,
// based on options.skipNonTranspilationPasses.
private void performChecksAndTranspilation() {
  if (options.skipNonTranspilationPasses) {
    // i.e. whitespace-only mode, which will not work with goog.module without:
    whitespaceOnlyPasses();
    if (options.lowerFromEs6()) {
      transpileAndDontCheck();
    }
  } else {
    check(); // check() also includes transpilation
  }
}

/**
 * Performs all the bookkeeping required at the end of a compilation.
 *
 * <p>This method must be called if the compilation makes it as far as doing checks.
 * <p> DON'T call it if the compiler threw an exception.
 * <p> DO call it even when {@code hasErrors()} returns true.
 */
public void completeCompilation() {
  runInCompilerThread(new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      completeCompilationInternal();
      return null;
    }
  });
}

/**
 * Performs all the bookkeeping required at the end of a compilation.
 */
private void completeCompilationInternal() {
  if (options.recordFunctionInformation) {
    recordFunctionInformation();
  }

  if (options.devMode == DevMode.START_AND_END) {
    runSanityCheck();
  }
  setProgress(1.0, "recordFunctionInformation");

  if (tracker != null) {
    tracker.outputTracerReport();
  }
}

/**
 * Instrument code for coverage.
 *
 * <p>{@code parseForCompilation()} must be called before this method is called.
 *
 * <p>The caller is responsible for also calling {@code generateReport()} to generate a report of
 * warnings and errors to stderr. See the invocation in {@link #compile} for a good example.
 *
 * <p>This method is mutually exclusive with stage1Passes() and stage2Passes().
 * Either call those two methods or this one, but not both.
 */
public void instrumentForCoverage() {
  checkState(
      inputs != null && !inputs.isEmpty(), "No inputs. Did you call init() or initModules()?");
  checkState(!hasErrors());
  runInCompilerThread(
      new Callable<Void>() {
        @Override
        public Void call() throws Exception {
          checkState(options.getInstrumentForCoverageOnly());
          checkState(!hasErrors());
          instrumentForCoverageInternal(options.instrumentBranchCoverage);
          return null;
        }
      });
}

// Runs the coverage instrumentation pass, selecting line or branch instrumentation.
private void instrumentForCoverageInternal(boolean instrumentBranchCoverage) {
  Tracer tracer = newTracer("instrumentationPass");
  InstrumentOption instrumentOption = InstrumentOption.LINE_ONLY;
  if (instrumentBranchCoverage) {
    instrumentOption = InstrumentOption.BRANCH_ONLY;
  }
  process(new CoverageInstrumentationPass(this, CoverageReach.ALL, instrumentOption));
  stopTracer(tracer, "instrumentationPass");
}

/**
 * Parses input files in preparation for compilation.
 *
 * <p>Either {@code init()} or {@code initModules()} must be called first to set up the input
 * files to be read.
 * <p>TODO(bradfordcsmith): Rename this to parse()
 */
public void parseForCompilation() {
  runInCompilerThread(
      new Callable<Void>() {
        @Override
        public Void call() throws Exception {
          parseForCompilationInternal();
          return null;
        }
      });
}

/**
 * Parses input files in preparation for compilation.
 *
 * <p>Either {@code init()} or {@code initModules()} must be called first to set up the input
 * files to be read.
 *
 * <p>TODO(bradfordcsmith): Rename this to parse()
 */
private void parseForCompilationInternal() {
  setProgress(0.0, null);
  CompilerOptionsPreprocessor.preprocess(options);
  readInputs();
  // Guesstimate.
  setProgress(0.02, "read");
  parseInputs();
  // Guesstimate.
  setProgress(0.15, "parse");
}

/**
 * Parses input files without doing progress tracking that is part of a full compile.
 *
 * <p>Either {@code init()} or {@code initModules()} must be called first to set up the input
 * files to be read.
 * <p>TODO(bradfordcsmith): Rename this to parseIndependentOfCompilation() or similar.
*/
public void parse() {
  parseInputs();
}

// Returns the PassConfig, creating the default config lazily on first use.
PassConfig getPassConfig() {
  if (passes == null) {
    passes = createPassConfigInternal();
  }
  return passes;
}

/**
 * Create the passes object. Clients should use setPassConfig instead of
 * overriding this.
 */
PassConfig createPassConfigInternal() {
  return new DefaultPassConfig(options);
}

/**
 * @param passes The PassConfig to use with this Compiler.
 * @throws NullPointerException if passes is null
 * @throws IllegalStateException if this.passes has already been assigned
 */
public void setPassConfig(PassConfig passes) {
  // Important to check for null because if setPassConfig(null) is
  // called before this.passes is set, getPassConfig() will create a
  // new PassConfig object and use that, which is probably not what
  // the client wanted since they probably meant to use their
  // own PassConfig object.
  checkNotNull(passes);
  checkState(this.passes == null, "setPassConfig was already called");
  this.passes = passes;
}

// Runs the custom BEFORE_CHECKS passes, then the whitespace-only pass set.
public void whitespaceOnlyPasses() {
  runCustomPasses(CustomPassExecutionTime.BEFORE_CHECKS);

  Tracer t = newTracer("runWhitespaceOnlyPasses");
  try {
    for (PassFactory pf : getPassConfig().getWhitespaceOnlyPasses()) {
      pf.create(this).process(externsRoot, jsRoot);
    }
  } finally {
    stopTracer(t, "runWhitespaceOnlyPasses");
  }
}

// Runs only the transpilation passes, with no checks.
public void transpileAndDontCheck() {
  Tracer t = newTracer("runTranspileOnlyPasses");
  try {
    for (PassFactory pf : getPassConfig().getTranspileOnlyPasses()) {
      pf.create(this).process(externsRoot, jsRoot);
    }
  } finally {
    stopTracer(t, "runTranspileOnlyPasses");
  }
}

// Builds a PhaseOptimizer configured from the current dev-mode and determinism options.
private PhaseOptimizer createPhaseOptimizer() {
  PhaseOptimizer phaseOptimizer = new PhaseOptimizer(this, tracker);
  if (options.devMode == DevMode.EVERY_PASS) {
    phaseOptimizer.setSanityCheck(sanityCheck);
  }
  if (options.getCheckDeterminism()) {
    phaseOptimizer.setPrintAstHashcodes(true);
  }
  return phaseOptimizer;
}

// Runs the check passes (which also include transpilation), then optional code stripping
// and the custom BEFORE_OPTIMIZATIONS passes. Returns early if errors were reported.
void check() {
  runCustomPasses(CustomPassExecutionTime.BEFORE_CHECKS);

  // We are currently only interested in check-passes for progress reporting
  // as it is used for IDEs, that's why the maximum progress is set to 1.0.
  phaseOptimizer = createPhaseOptimizer().withProgress(
      new PhaseOptimizer.ProgressRange(getProgress(), 1.0));
  phaseOptimizer.consume(getPassConfig().getChecks());
  phaseOptimizer.process(externsRoot, jsRoot);
  if (hasErrors()) {
    return;
  }

  if (options.getTweakProcessing().shouldStrip()
      || !options.stripTypes.isEmpty()
      || !options.stripNameSuffixes.isEmpty()
      || !options.stripTypePrefixes.isEmpty()
      || !options.stripNamePrefixes.isEmpty()) {
    stripCode(options.stripTypes, options.stripNameSuffixes,
        options.stripTypePrefixes, options.stripNamePrefixes);
  }

  runCustomPasses(CustomPassExecutionTime.BEFORE_OPTIMIZATIONS);
  phaseOptimizer = null;
}

@Override
void setExternExports(String externExports) {
  this.externExports = externExports;
}

// Runs a single pass over the full extern and JS ASTs.
@Override
void process(CompilerPass p) {
  p.process(externsRoot, jsRoot);
}

// Factory for the AST sanity-check pass used in dev modes.
private final PassFactory sanityCheck =
    new PassFactory("sanityCheck", false) {
      @Override
      protected CompilerPass create(AbstractCompiler compiler) {
        return new SanityCheck(compiler);
      }
    };

// Runs a sanity check only when devMode requests one after every pass.
private void maybeSanityCheck() {
  if (options.devMode == DevMode.EVERY_PASS) {
    runSanityCheck();
  }
}

private void runSanityCheck() {
  sanityCheck.create(this).process(externsRoot, jsRoot);
}

/**
 * Strips code for smaller compiled code. This is useful for removing debug
 * statements to prevent leaking them publicly.
 */
void stripCode(Set<String> stripTypes, Set<String> stripNameSuffixes,
    Set<String> stripTypePrefixes, Set<String> stripNamePrefixes) {
  logger.fine("Strip code");
  startPass("stripCode");
  StripCode r = new StripCode(this, stripTypes, stripNameSuffixes,
      stripTypePrefixes, stripNamePrefixes);
  if (options.getTweakProcessing().shouldStrip()) {
    r.enableTweakStripping();
  }
  process(r);
  endPass("stripCode");
}

/**
 * Runs custom passes that are designated to run at a particular time.
*/
private void runCustomPasses(CustomPassExecutionTime executionTime) {
  if (options.customPasses != null) {
    Tracer t = newTracer("runCustomPasses");
    try {
      for (CompilerPass p : options.customPasses.get(executionTime)) {
        process(p);
      }
    } finally {
      stopTracer(t, "runCustomPasses");
    }
  }
}

// Tracer/name for the pass currently in flight; null between passes.
private Tracer currentTracer = null;
private String currentPassName = null;

/**
 * Marks the beginning of a pass.
 */
void startPass(String passName) {
  checkState(currentTracer == null);
  currentPassName = passName;
  currentTracer = newTracer(passName);
  beforePass(passName);
}

/**
 * Marks the end of a pass.
 */
void endPass(String passName) {
  checkState(currentTracer != null, "Tracer should not be null at the end of a pass.");
  stopTracer(currentTracer, currentPassName);
  afterPass(passName);
  currentPassName = null;
  currentTracer = null;

  maybeSanityCheck();
}

@Override
final void beforePass(String passName) {
  // does nothing for now
}

// When printSourceAfterEachPass is on, prints the current JS source to stdout
// whenever a pass actually changed it.
@Override
final void afterPass(String passName) {
  if (options.printSourceAfterEachPass) {
    String currentJsSource = getCurrentJsSource();
    if (!currentJsSource.equals(this.lastJsSource)) {
      System.out.println();
      System.out.println("// " + passName + " yields:");
      System.out.println("// ************************************");
      System.out.println(currentJsSource);
      lastJsSource = currentJsSource;
    }
  }
}

// Renders the current JS source: either the whole program, or only the files
// listed in options.filesToPrintAfterEachPass.
final String getCurrentJsSource() {
  List<String> filenames = options.filesToPrintAfterEachPass;
  if (filenames.isEmpty()) {
    return toSource();
  } else {
    StringBuilder builder = new StringBuilder();
    for (String filename : filenames) {
      Node script = getScriptNode(filename);
      String source = script != null
          ? "// " + script.getSourceFileName() + "\n" + toSource(script)
          : "File '" + filename + "' not found";
      builder.append(source);
    }
    return builder.toString();
  }
}

// Finds the SCRIPT node whose source file name ends with the given filename, or null.
final Node getScriptNode(String filename) {
  for (Node file : jsRoot.children()) {
    if (file.getSourceFileName() != null && file.getSourceFileName().endsWith(filename)) {
      return file;
    }
  }
  return null;
}

/**
 * Returns a new tracer for the given pass name.
 */
Tracer newTracer(String passName) {
  String comment = passName
      + (recentChange.hasCodeChanged() ? " on recently changed AST" : "");
  if (options.getTracerMode().isOn() && tracker != null) {
    tracker.recordPassStart(passName, true);
  }
  return new Tracer("Compiler", comment);
}

// Stops the given tracer and records the elapsed time with the performance tracker.
void stopTracer(Tracer t, String passName) {
  long result = t.stop();
  if (options.getTracerMode().isOn() && tracker != null) {
    tracker.recordPassStop(passName, result);
  }
}

/**
 * Returns the result of the compilation.
 */
public Result getResult() {
  PassConfig.State state = getPassConfig().getIntermediateState();
  Set<SourceFile> transpiledFiles = new HashSet<>();
  if (jsRoot != null) {
    for (Node scriptNode : jsRoot.children()) {
      if (scriptNode.getBooleanProp(Node.TRANSPILED)) {
        transpiledFiles.add(getSourceFileByName(scriptNode.getSourceFileName()));
      }
    }
  }
  return new Result(getErrors(), getWarnings(), debugLog.toString(),
      state.variableMap, state.propertyMap,
      state.anonymousFunctionNameMap, state.stringMap, functionInformationMap,
      sourceMap, externExports, state.cssNames, state.idGeneratorMap, transpiledFiles);
}

/**
 * Returns the array of errors (never null).
 */
public JSError[] getErrors() {
  if (errorManager == null) {
    return new JSError[] {};
  }
  return errorManager.getErrors();
}

/**
 * Returns the array of warnings (never null).
*/
public JSError[] getWarnings() {
  if (errorManager == null) {
    return new JSError[] {};
  }
  return errorManager.getWarnings();
}

@Override
public Node getRoot() {
  return externAndJsRoot;
}

@Override
CompilerOptions.LanguageMode getLanguageMode() {
  return languageMode;
}

@Override
void setLanguageMode(CompilerOptions.LanguageMode mode) {
  languageMode = mode;
}

/**
 * Creates a new id for making unique names.
 */
private int nextUniqueNameId() {
  return uniqueNameId++;
}

/**
 * Resets the unique name id counter
 */
@VisibleForTesting
void resetUniqueNameId() {
  uniqueNameId = 0;
}

// Supplies monotonically increasing ids as strings, backed by nextUniqueNameId().
@Override
Supplier<String> getUniqueNameIdSupplier() {
  final Compiler self = this;
  return new Supplier<String>() {
    @Override
    public String get() {
      return String.valueOf(self.nextUniqueNameId());
    }
  };
}

// Uses typed equivalence when type-based optimizations are enabled, since those
// optimizations require type information to be preserved.
@Override
boolean areNodesEqualForInlining(Node n1, Node n2) {
  if (options.shouldAmbiguateProperties() ||
      options.shouldDisambiguateProperties()) {
    // The type based optimizations require that type information is preserved
    // during other optimizations.
    return n1.isEquivalentToTyped(n2);
  } else {
    return n1.isEquivalentTo(n2);
  }
}

//------------------------------------------------------------------------
// Inputs
//------------------------------------------------------------------------

// TODO(nicksantos): Decide which parts of these belong in an AbstractCompiler
// interface, and which ones should always be injected.

@Override
public CompilerInput getInput(InputId id) {
  // TODO(bradfordcsmith): Allowing null id is less ideal. Add checkNotNull(id) here and fix
  // call sites that break.
  if (id == null) {
    return null;
  }
  return inputsById.get(id);
}

/**
 * Removes an input file from AST.
 * @param id The id of the input to be removed.
 */
protected void removeExternInput(InputId id) {
  CompilerInput input = getInput(id);
  if (input == null) {
    return;
  }
  checkState(input.isExtern(), "Not an extern input: %s", input.getName());
  inputsById.remove(id);
  externs.remove(input);
  Node root = input.getAstRoot(this);
  if (root != null) {
    root.detach();
  }
}

// Where to put a new synthetic externs file.
private static enum SyntheticExternsPosition {
  START,
  END
}

// Creates and registers a synthetic externs input, placing its AST at the front
// or back of the externs root. Throws if the name is already in use.
CompilerInput newExternInput(String name, SyntheticExternsPosition pos) {
  SourceAst ast = new SyntheticAst(name);
  if (inputsById.containsKey(ast.getInputId())) {
    throw new IllegalArgumentException("Conflicting externs name: " + name);
  }
  CompilerInput input = new CompilerInput(ast, true);
  putCompilerInput(input.getInputId(), input);
  if (pos == SyntheticExternsPosition.START) {
    externsRoot.addChildToFront(ast.getAstRoot(this));
    externs.add(0, input);
  } else {
    externsRoot.addChildToBack(ast.getAstRoot(this));
    externs.add(input);
  }
  return input;
}

// Registers the input with this compiler and returns the previous mapping for the id, if any.
CompilerInput putCompilerInput(InputId id, CompilerInput input) {
  input.setCompiler(this);
  return inputsById.put(id, input);
}

/**
 * Replace a source input dynamically. Intended for incremental
 * re-compilation.
 *
 * If the new source input doesn't parse, then keep the old input
 * in the AST and return false.
 *
 * @return Whether the new AST was attached successfully.
*/
boolean replaceIncrementalSourceAst(JsAst ast) {
  CompilerInput oldInput = getInput(ast.getInputId());
  checkNotNull(oldInput, "No input to replace: %s", ast.getInputId().getIdName());
  Node newRoot = ast.getAstRoot(this);
  if (newRoot == null) {
    // New source failed to parse; keep the old input in place.
    return false;
  }

  Node oldRoot = oldInput.getAstRoot(this);
  if (oldRoot != null) {
    oldRoot.replaceWith(newRoot);
  } else {
    getRoot().getLastChild().addChildToBack(newRoot);
  }

  CompilerInput newInput = new CompilerInput(ast);
  putCompilerInput(ast.getInputId(), newInput);

  JSModule module = oldInput.getModule();
  if (module != null) {
    module.addAfter(newInput, oldInput);
    module.remove(oldInput);
  }

  // Verify the input id is set properly.
  checkState(newInput.getInputId().equals(oldInput.getInputId()));
  InputId inputIdOnAst = newInput.getAstRoot(this).getInputId();
  checkState(newInput.getInputId().equals(inputIdOnAst));

  inputs.remove(oldInput);
  return true;
}

/**
 * Add a new source input dynamically. Intended for incremental compilation.
 * <p>
 * If the new source input doesn't parse, it will not be added, and a false
 * will be returned.
 *
 * @param ast the JS Source to add.
 * @return true if the source was added successfully, false otherwise.
 * @throws IllegalStateException if an input for this ast already exists.
 */
boolean addNewSourceAst(JsAst ast) {
  CompilerInput oldInput = getInput(ast.getInputId());
  if (oldInput != null) {
    throw new IllegalStateException(
        "Input already exists: " + ast.getInputId().getIdName());
  }
  Node newRoot = ast.getAstRoot(this);
  if (newRoot == null) {
    return false;
  }

  getRoot().getLastChild().addChildToBack(newRoot);

  CompilerInput newInput = new CompilerInput(ast);

  // TODO(tylerg): handle this for multiple modules at some point.
  if (moduleGraph == null && !modules.isEmpty()) {
    // singleton module
    modules.get(0).add(newInput);
  }

  putCompilerInput(ast.getInputId(), newInput);

  return true;
}

/**
 * The graph of the JS source modules.
* * <p>Must return null if there are less than 2 modules, * because we use this as a signal for which passes to run. * TODO(bradfordcsmith): Just check for a single module instead of null. */ @Override JSModuleGraph getModuleGraph() { if (moduleGraph != null && modules.size() > 1) { return moduleGraph; } else { return null; } } /** * Gets a module graph. This will always return a module graph, even * in the degenerate case when there's only one module. */ JSModuleGraph getDegenerateModuleGraph() { return moduleGraph; } @Override public TypeIRegistry getTypeIRegistry() { switch (mostRecentTypechecker) { case NONE: // Even in compiles where typechecking is not enabled, some passes ask for the // type registry, eg, GatherExternProperties does. Also, in CheckAccessControls, // the constructor asks for a type registry, and this may happen before type checking // runs. So, in the NONE case, if NTI is enabled, return a new registry, since NTI is // the relevant type checker. If NTI is not enabled, return an old registry. return options.getNewTypeInference() ? 
getSymbolTable() : getTypeRegistry(); case OTI: return getTypeRegistry(); case NTI: return getSymbolTable(); default: throw new RuntimeException("Unhandled typechecker " + mostRecentTypechecker); } } @Override public JSTypeRegistry getTypeRegistry() { if (typeRegistry == null) { typeRegistry = new JSTypeRegistry(oldErrorReporter, forwardDeclaredTypes); } return typeRegistry; } @Override void forwardDeclareType(String typeName) { if (options.allowUnfulfilledForwardDeclarations()) { forwardDeclaredTypes.add(typeName); } } @Override void setMostRecentTypechecker(MostRecentTypechecker lastRun) { this.mostRecentTypechecker = lastRun; } @Override // Only used by jsdev public MemoizedScopeCreator getTypedScopeCreator() { return getPassConfig().getTypedScopeCreator(); } @SuppressWarnings("unchecked") DefaultPassConfig ensureDefaultPassConfig() { PassConfig passes = getPassConfig().getBasePassConfig(); checkState( passes instanceof DefaultPassConfig, "PassConfigs must eventually delegate to the DefaultPassConfig"); return (DefaultPassConfig) passes; } public SymbolTable buildKnownSymbolTable() { SymbolTable symbolTable = new SymbolTable(this, getTypeRegistry()); MemoizedScopeCreator typedScopeCreator = getTypedScopeCreator(); if (typedScopeCreator != null) { symbolTable.addScopes(typedScopeCreator.getAllMemoizedScopes()); symbolTable.addSymbolsFrom(typedScopeCreator); } else { symbolTable.findScopes(externsRoot, jsRoot); } GlobalNamespace globalNamespace = ensureDefaultPassConfig().getGlobalNamespace(); if (globalNamespace != null) { symbolTable.addSymbolsFrom(globalNamespace); } ReferenceCollectingCallback refCollector = new ReferenceCollectingCallback( this, ReferenceCollectingCallback.DO_NOTHING_BEHAVIOR, SyntacticScopeCreator.makeUntyped(this)); refCollector.process(getRoot()); symbolTable.addSymbolsFrom(refCollector); PreprocessorSymbolTable preprocessorSymbolTable = ensureDefaultPassConfig().getPreprocessorSymbolTable(); if (preprocessorSymbolTable != null) { 
symbolTable.addSymbolsFrom(preprocessorSymbolTable); } symbolTable.fillNamespaceReferences(); symbolTable.fillPropertyScopes(); symbolTable.fillThisReferences(externsRoot, jsRoot); symbolTable.fillPropertySymbols(externsRoot, jsRoot); symbolTable.fillJSDocInfo(externsRoot, jsRoot); symbolTable.fillSymbolVisibility(externsRoot, jsRoot); return symbolTable; } @Override public TypedScope getTopScope() { return getPassConfig().getTopScope(); } @Override public ReverseAbstractInterpreter getReverseAbstractInterpreter() { if (abstractInterpreter == null) { ChainableReverseAbstractInterpreter interpreter = new SemanticReverseAbstractInterpreter(getTypeRegistry()); if (options.closurePass) { interpreter = new ClosureReverseAbstractInterpreter(getTypeRegistry()) .append(interpreter).getFirst(); } abstractInterpreter = interpreter; } return abstractInterpreter; } @Override // Only used by passes in the old type checker. TypeValidator getTypeValidator() { if (typeValidator == null) { typeValidator = new TypeValidator(this); } return typeValidator; } @Override Iterable<TypeMismatch> getTypeMismatches() { switch (this.mostRecentTypechecker) { case OTI: return getTypeValidator().getMismatches(); case NTI: return getSymbolTable().getMismatches(); default: throw new RuntimeException("Can't ask for type mismatches before type checking."); } } @Override Iterable<TypeMismatch> getImplicitInterfaceUses() { switch (this.mostRecentTypechecker) { case OTI: return getTypeValidator().getImplicitInterfaceUses(); case NTI: return getSymbolTable().getImplicitInterfaceUses(); default: throw new RuntimeException("Can't ask for type mismatches before type checking."); } } @Override GlobalTypeInfo getSymbolTable() { if (this.symbolTable == null) { this.symbolTable = new GlobalTypeInfo(this, forwardDeclaredTypes); } return this.symbolTable; } @Override DefinitionUseSiteFinder getDefinitionFinder() { return this.defFinder; } @Override void setDefinitionFinder(DefinitionUseSiteFinder defFinder) { 
this.defFinder = defFinder; } //------------------------------------------------------------------------ // Reading //------------------------------------------------------------------------ /** * Performs all externs and main inputs IO. * * <p>Allows for easy measurement of IO cost separately from parse cost. */ void readInputs() { checkState(!hasErrors()); checkNotNull(externs); checkNotNull(inputs); if (options.getTracerMode().isOn()) { tracker = new PerformanceTracker(externsRoot, jsRoot, options.getTracerMode(), this.outStream); addChangeHandler(tracker.getCodeChangeHandler()); } Tracer tracer = newTracer(READING_PASS_NAME); beforePass(READING_PASS_NAME); try { for (CompilerInput input : Iterables.concat(externs, inputs)) { try { input.getCode(); } catch (IOException e) { report(JSError.make(AbstractCompiler.READ_ERROR, input.getName())); } } } finally { afterPass(READING_PASS_NAME); stopTracer(tracer, READING_PASS_NAME); } } //------------------------------------------------------------------------ // Parsing //------------------------------------------------------------------------ /** * Parses the externs and main inputs. * * @return A synthetic root node whose two children are the externs root * and the main root */ Node parseInputs() { boolean devMode = options.devMode != DevMode.OFF; // If old roots exist (we are parsing a second time), detach each of the // individual file parse trees. externsRoot.detachChildren(); jsRoot.detachChildren(); Tracer tracer = newTracer(PARSING_PASS_NAME); beforePass(PARSING_PASS_NAME); try { // Parse externs sources. 
if (options.numParallelThreads > 1) { new PrebuildAst(this, options.numParallelThreads).prebuild(externs); } for (CompilerInput input : externs) { Node n = input.getAstRoot(this); if (hasErrors()) { return null; } externsRoot.addChildToBack(n); } if (options.lowerFromEs6() || options.transformAMDToCJSModules || options.processCommonJSModules) { this.moduleLoader = new ModuleLoader( this, options.moduleRoots, inputs, ModuleLoader.PathResolver.RELATIVE, options.moduleResolutionMode, null); if (options.moduleResolutionMode == ModuleLoader.ResolutionMode.NODE) { // processJsonInputs requires a module loader to already be defined // so we redefine it afterwards with the package.json inputs this.moduleLoader = new ModuleLoader( this, options.moduleRoots, inputs, ModuleLoader.PathResolver.RELATIVE, options.moduleResolutionMode, processJsonInputs(inputs)); } if (options.lowerFromEs6()) { processEs6Modules(); } // Modules inferred in ProcessCommonJS pass. if (options.transformAMDToCJSModules || options.processCommonJSModules) { processAMDAndCommonJSModules(); } // Build a map of module identifiers for any input which provides no namespace. // These files could be imported modules which have no exports, but do have side effects. Map<String, CompilerInput> inputModuleIdentifiers = new HashMap<>(); for (CompilerInput input : inputs) { if (input.getKnownProvides().isEmpty()) { ModuleLoader.ModulePath modPath = moduleLoader.resolve(input.getSourceFile().getOriginalPath()); inputModuleIdentifiers.put(modPath.toModuleName(), input); } } // Find out if any input attempted to import a module that had no exports. 
// In this case we must force module rewriting to occur on the imported file Map<String, CompilerInput> inputsToRewrite = new HashMap<>(); for (CompilerInput input : inputs) { for (String require : input.getKnownRequires()) { if (inputModuleIdentifiers.containsKey(require) && !inputsToRewrite.containsKey(require)) { inputsToRewrite.put(require, inputModuleIdentifiers.get(require)); } } } if (!inputsToRewrite.isEmpty()) { processEs6Modules(new ArrayList<>(inputsToRewrite.values()), true); } } else { // Use an empty module loader if we're not actually dealing with modules. this.moduleLoader = ModuleLoader.EMPTY; } orderInputs(); // If in IDE mode, we ignore the error and keep going. if (hasErrors()) { return null; } // Build the AST. if (options.numParallelThreads > 1) { new PrebuildAst(this, options.numParallelThreads).prebuild(inputs); } for (CompilerInput input : inputs) { Node n = input.getAstRoot(this); if (n == null) { continue; } if (devMode) { runSanityCheck(); if (hasErrors()) { return null; } } // TODO(johnlenz): we shouldn't need to check both isExternExportsEnabled and // externExportsPath. if (options.sourceMapOutputPath != null || options.isExternExportsEnabled() || options.externExportsPath != null || !options.replaceStringsFunctionDescriptions.isEmpty()) { // Annotate the nodes in the tree with information from the // input file. This information is used to construct the SourceMap. 
          SourceInformationAnnotator sia =
              new SourceInformationAnnotator(
                  input.getName(), options.devMode != DevMode.OFF);
          NodeTraversal.traverseEs6(this, n, sia);
        }

        jsRoot.addChildToBack(n);
      }

      if (hasErrors()) {
        return null;
      }
      return externAndJsRoot;
    } finally {
      afterPass(PARSING_PASS_NAME);
      stopTracer(tracer, PARSING_PASS_NAME);
    }
  }

  // Runs input ordering on the compiler thread, which is configured with a larger
  // stack; dependency sorting can recurse deeply on large dependency graphs.
  void orderInputsWithLargeStack() {
    runInCompilerThread(
        new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            Tracer tracer = newTracer("orderInputsWithLargeStack");
            try {
              orderInputs();
            } finally {
              stopTracer(tracer, "orderInputsWithLargeStack");
            }
            return null;
          }
        });
  }

  // Hoists externs and @nocompile files out of the inputs and, when dependency
  // management is enabled, sorts/prunes the remaining inputs.
  void orderInputs() {
    hoistUnorderedExterns();
    // Check if the sources need to be re-ordered.
    boolean staleInputs = false;
    if (options.dependencyOptions.needsManagement()) {
      for (CompilerInput input : inputs) {
        // Forward-declare all the provided types, so that they
        // are not flagged even if they are dropped from the process.
        for (String provide : input.getProvides()) {
          forwardDeclareType(provide);
        }
      }
      try {
        inputs = getDegenerateModuleGraph().manageDependencies(options.dependencyOptions, inputs);
        staleInputs = true;
      } catch (MissingProvideException e) {
        report(JSError.make(MISSING_ENTRY_ERROR, e.getMessage()));
      } catch (JSModuleGraph.MissingModuleException e) {
        report(JSError.make(MISSING_MODULE_ERROR, e.getMessage()));
      }
    }

    if (options.dependencyOptions.needsManagement() && options.allowGoogProvideInExterns()) {
      hoistAllExterns();
    }

    hoistNoCompileFiles();

    if (staleInputs) {
      repartitionInputs();
    }
  }

  /**
   * Hoists inputs with the @externs annotation and no provides or requires into the externs list.
   */
  void hoistUnorderedExterns() {
    boolean staleInputs = false;
    for (CompilerInput input : inputs) {
      if (options.dependencyOptions.needsManagement()) {
        // If we're doing scanning dependency info anyway, use that
        // information to skip sources that obviously aren't externs.
        if (!input.getProvides().isEmpty() || !input.getRequires().isEmpty()) {
          continue;
        }
      }

      if (hoistIfExtern(input)) {
        staleInputs = true;
      }
    }
    if (staleInputs) {
      repartitionInputs();
    }
  }

  /**
   * Hoists inputs with the @externs annotation into the externs list.
   */
  void hoistAllExterns() {
    boolean staleInputs = false;
    for (CompilerInput input : inputs) {
      if (hoistIfExtern(input)) {
        staleInputs = true;
      }
    }
    if (staleInputs) {
      repartitionInputs();
    }
  }

  /**
   * Hoists a compiler input to externs if it contains the @externs annotation.
   * Return whether or not the given input was hoisted.
   */
  private boolean hoistIfExtern(CompilerInput input) {
    Node n = input.getAstRoot(this);

    // Inputs can have a null AST on a parse error.
    if (n == null) {
      return false;
    }

    JSDocInfo info = n.getJSDocInfo();
    if (info != null && info.isExterns()) {
      // If the input file is explicitly marked as an externs file, then
      // assume the programmer made a mistake and throw it into
      // the externs pile anyways.
      externsRoot.addChildToBack(n);
      input.setIsExtern(true);

      input.getModule().remove(input);

      externs.add(input);
      return true;
    }
    return false;
  }

  /**
   * Hoists inputs with the @nocompile annotation out of the inputs.
   */
  void hoistNoCompileFiles() {
    boolean staleInputs = false;
    for (CompilerInput input : inputs) {
      Node n = input.getAstRoot(this);

      // Inputs can have a null AST on a parse error.
      if (n == null) {
        continue;
      }

      JSDocInfo info = n.getJSDocInfo();
      if (info != null && info.isNoCompile()) {
        input.getModule().remove(input);
        staleInputs = true;
      }
    }

    if (staleInputs) {
      repartitionInputs();
    }
  }

  // Rebuilds the module partitioning and the flat inputs list after inputs were
  // added to / removed from modules.
  private void repartitionInputs() {
    fillEmptyModules(modules);
    rebuildInputsFromModules();
  }

  /**
   * Transforms JSON files to a module export that closure compiler can process and keeps track of
   * any "main" entries in package.json files.
   */
  Map<String, String> processJsonInputs(List<CompilerInput> inputsToProcess) {
    RewriteJsonToModule rewriteJson = new RewriteJsonToModule(this);
    for (CompilerInput input : inputsToProcess) {
      if (!input.getSourceFile().getOriginalPath().endsWith(".json")) {
        continue;
      }

      input.setCompiler(this);
      try {
        // JSON objects need wrapped in parens to parse properly
        input.getSourceFile().setCode("(" + input.getSourceFile().getCode() + ")");
      } catch (IOException e) {
        // Unreadable JSON inputs are skipped; read errors surface elsewhere.
        continue;
      }

      Node root = input.getAstRoot(this);
      if (root == null) {
        continue;
      }
      rewriteJson.process(null, root);
    }
    return rewriteJson.getPackageJsonMainEntries();
  }

  // Rewrites all detected ES6 modules among the main inputs.
  void processEs6Modules() {
    processEs6Modules(inputs, false);
  }

  // Rewrites ES6 modules among the given inputs. When forceRewrite is true, every
  // listed input is rewritten regardless of module detection.
  void processEs6Modules(List<CompilerInput> inputsToProcess, boolean forceRewrite) {
    List<CompilerInput> filteredInputs = new ArrayList<>();
    for (CompilerInput input : inputsToProcess) {
      // Only process files that are detected as ES6 modules or forced to be rewritten
      if (forceRewrite
          || !options.dependencyOptions.shouldPruneDependencies()
          || !JsFileParser.isSupported()
          || (input.getLoadFlags().containsKey("module")
              && input.getLoadFlags().get("module").equals("es6"))) {
        filteredInputs.add(input);
      }
    }
    if (options.numParallelThreads > 1) {
      new PrebuildAst(this, options.numParallelThreads).prebuild(filteredInputs);
    }
    for (CompilerInput input : filteredInputs) {
      input.setCompiler(this);
      Node root = input.getAstRoot(this);
      if (root == null) {
        continue;
      }
      new ProcessEs6Modules(this).processFile(root, forceRewrite);
    }
  }

  /**
   * Transforms AMD and CJS modules to something closure compiler can
   * process and creates JSModules and the corresponding dependency tree
   * on the way.
   */
  void processAMDAndCommonJSModules() {
    for (CompilerInput input : inputs) {
      input.setCompiler(this);
      Node root = input.getAstRoot(this);
      if (root == null) {
        continue;
      }
      // AMD is first lowered to CommonJS, then CommonJS is processed.
      if (options.transformAMDToCJSModules) {
        new TransformAMDToCJSModule(this).process(null, root);
      }
      if (options.processCommonJSModules) {
        ProcessCommonJSModules cjs = new ProcessCommonJSModules(this, true);
        cjs.process(null, root);
      }
    }
  }

  // Parses a single file into an AST outside the main compile flow.
  public Node parse(SourceFile file) {
    initCompilerOptionsIfTesting();
    addToDebugLog("Parsing: " + file.getName());
    return new JsAst(file).getAstRoot(this);
  }

  /**
   * Allow subclasses to override the default CompileOptions object.
   */
  protected CompilerOptions newCompilerOptions() {
    return new CompilerOptions();
  }

  void initCompilerOptionsIfTesting() {
    if (options == null) {
      // initialization for tests that don't initialize the compiler
      // by the normal mechanisms.
      initOptions(newCompilerOptions());
    }
  }

  // Counter used to give each synthetic-code input a unique file name.
  private int syntheticCodeId = 0;

  @Override
  Node parseSyntheticCode(String js) {
    return parseSyntheticCode(" [synthetic:" + (++syntheticCodeId) + "] ", js);
  }

  @Override
  Node parseSyntheticCode(String fileName, String js) {
    initCompilerOptionsIfTesting();
    SourceFile source = SourceFile.fromCode(fileName, js);
    addFilesToSourceMap(ImmutableList.of(source));
    return parseCodeHelper(source);
  }

  @Override
  @VisibleForTesting
  Node parseTestCode(String js) {
    initCompilerOptionsIfTesting();
    initBasedOnOptions();
    return parseCodeHelper(SourceFile.fromCode("[testcode]", js));
  }

  private Node parseCodeHelper(SourceFile src) {
    CompilerInput input = new CompilerInput(src);
    putCompilerInput(input.getInputId(), input);
    return input.getAstRoot(this);
  }

  @Override
  ErrorReporter getDefaultErrorReporter() {
    return oldErrorReporter;
  }

  //------------------------------------------------------------------------
  // Convert back to source code
  //------------------------------------------------------------------------

  /**
   * Converts the main parse tree back to JS code.
   */
  @Override
  public String toSource() {
    // Code generation runs on the compiler thread (larger stack).
    return runInCompilerThread(
        new Callable<String>() {
          @Override
          public String call() throws Exception {
            Tracer tracer = newTracer("toSource");
            try {
              CodeBuilder cb = new CodeBuilder();
              if (jsRoot != null) {
                int i = 0;
                for (Node scriptNode = jsRoot.getFirstChild();
                    scriptNode != null;
                    scriptNode = scriptNode.getNext()) {
                  toSource(cb, i++, scriptNode);
                }
              }
              return cb.toString();
            } finally {
              stopTracer(tracer, "toSource");
            }
          }
        });
  }

  /**
   * Converts the parse tree for each input back to JS code.
   */
  public String[] toSourceArray() {
    return runInCompilerThread(
        new Callable<String[]>() {
          @Override
          public String[] call() throws Exception {
            Tracer tracer = newTracer("toSourceArray");
            try {
              int numInputs = inputs.size();
              String[] sources = new String[numInputs];
              CodeBuilder cb = new CodeBuilder();
              for (int i = 0; i < numInputs; i++) {
                Node scriptNode = inputs.get(i).getAstRoot(Compiler.this);
                // reset() clears the text but keeps line/column counts, so
                // source-map positions stay cumulative across inputs.
                cb.reset();
                toSource(cb, i, scriptNode);
                sources[i] = cb.toString();
              }
              return sources;
            } finally {
              stopTracer(tracer, "toSourceArray");
            }
          }
        });
  }

  /**
   * Converts the parse tree for a module back to JS code.
   */
  public String toSource(final JSModule module) {
    return runInCompilerThread(
        new Callable<String>() {
          @Override
          public String call() throws Exception {
            List<CompilerInput> inputs = module.getInputs();
            int numInputs = inputs.size();
            if (numInputs == 0) {
              return "";
            }
            CodeBuilder cb = new CodeBuilder();
            for (int i = 0; i < numInputs; i++) {
              Node scriptNode = inputs.get(i).getAstRoot(Compiler.this);
              if (scriptNode == null) {
                throw new IllegalArgumentException(
                    "Bad module: " + module.getName());
              }
              toSource(cb, i, scriptNode);
            }
            return cb.toString();
          }
        });
  }

  /**
   * Converts the parse tree for each input in a module back to JS code.
   */
  public String[] toSourceArray(final JSModule module) {
    return runInCompilerThread(
        new Callable<String[]>() {
          @Override
          public String[] call() throws Exception {
            List<CompilerInput> inputs = module.getInputs();
            int numInputs = inputs.size();
            if (numInputs == 0) {
              return new String[0];
            }

            String[] sources = new String[numInputs];
            CodeBuilder cb = new CodeBuilder();
            for (int i = 0; i < numInputs; i++) {
              Node scriptNode = inputs.get(i).getAstRoot(Compiler.this);
              if (scriptNode == null) {
                throw new IllegalArgumentException(
                    "Bad module input: " + inputs.get(i).getName());
              }

              // Clear text between inputs but keep cumulative line/column counts.
              cb.reset();
              toSource(cb, i, scriptNode);
              sources[i] = cb.toString();
            }
            return sources;
          }
        });
  }

  /**
   * Writes out JS code from a root node. If printing input delimiters, this
   * method will attach a comment to the start of the text indicating which
   * input the output derived from. If there were any preserve annotations
   * within the root's source, they will also be printed in a block comment
   * at the beginning of the output.
   */
  public void toSource(final CodeBuilder cb, final int inputSeqNum, final Node root) {
    runInCompilerThread(
        new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            if (options.printInputDelimiter) {
              if ((cb.getLength() > 0) && !cb.endsWith("\n")) {
                cb.append("\n"); // Make sure that the label starts on a new line
              }
              checkState(root.isScript());

              String delimiter = options.inputDelimiter;

              String inputName = root.getInputId().getIdName();
              String sourceName = root.getSourceFileName();
              checkState(sourceName != null);
              checkState(!sourceName.isEmpty());

              // quoteReplacement: the input name may contain regex-special chars.
              delimiter =
                  delimiter
                      .replaceAll("%name%", Matcher.quoteReplacement(inputName))
                      .replaceAll("%num%", String.valueOf(inputSeqNum));

              cb.append(delimiter).append("\n");
            }
            if (root.getJSDocInfo() != null) {
              String license = root.getJSDocInfo().getLicense();
              // Each unique license text is emitted only once per output.
              if (license != null && cb.addLicense(license)) {
                cb.append("/*\n").append(license).append("*/\n");
              }
            }

            // If there is a valid source map, then indicate to it that the current
            // root node's mappings are offset by the given string builder buffer.
            if (options.sourceMapOutputPath != null) {
              sourceMap.setStartingPosition(cb.getLineIndex(), cb.getColumnIndex());
            }

            // if LanguageMode is strict, only print 'use strict'
            // for the first input file
            String code = toSource(root, sourceMap, inputSeqNum == 0);
            if (!code.isEmpty()) {
              cb.append(code);

              // In order to avoid parse ambiguity when files are concatenated
              // together, all files should end in a semi-colon. Do a quick
              // heuristic check if there's an obvious semi-colon already there.
              int length = code.length();
              char lastChar = code.charAt(length - 1);
              char secondLastChar = length >= 2 ? code.charAt(length - 2) : '\0';
              boolean hasSemiColon =
                  lastChar == ';' || (lastChar == '\n' && secondLastChar == ';');
              if (!hasSemiColon) {
                cb.append(";");
              }
            }
            return null;
          }
        });
  }

  /**
   * Generates JavaScript source code for an AST, doesn't generate source
   * map info.
   */
  @Override
  public String toSource(Node n) {
    initCompilerOptionsIfTesting();
    return toSource(n, null, true);
  }

  /**
   * Generates JavaScript source code for an AST.
   */
  private String toSource(Node n, SourceMap sourceMap, boolean firstOutput) {
    CodePrinter.Builder builder = new CodePrinter.Builder(n);
    builder.setTypeRegistry(this.typeRegistry);
    builder.setCompilerOptions(options);
    builder.setSourceMap(sourceMap);
    // Externs/strict tagging only applies to the first emitted input.
    builder.setTagAsExterns(firstOutput && options.shouldGenerateTypedExterns());
    builder.setTagAsStrict(firstOutput && shouldEmitUseStrict());
    return builder.build();
  }

  // Whether a 'use strict' directive should be emitted for the configured
  // output language.
  private boolean shouldEmitUseStrict() {
    switch (options.getLanguageOut()) {
      case ECMASCRIPT3:
      case ECMASCRIPT5:
      case ECMASCRIPT6:
        return false;
      default:
        return options.isEmitUseStrict();
    }
  }

  /**
   * Stores a buffer of text to which more can be appended. This is just like a
   * StringBuilder except that we also track the number of lines.
   */
  public static class CodeBuilder {
    private final StringBuilder sb = new StringBuilder();
    // Zero-based line and column of the end of the buffer.
    private int lineCount = 0;
    private int colCount = 0;
    private final Set<String> uniqueLicenses = new HashSet<>();

    /** Removes all text, but leaves the line count unchanged. */
    void reset() {
      sb.setLength(0);
    }

    /** Appends the given string to the text buffer. */
    CodeBuilder append(String str) {
      sb.append(str);

      // Adjust the line and column information for the new text.
      int index = -1;
      int lastIndex = index;
      while ((index = str.indexOf('\n', index + 1)) >= 0) {
        ++lineCount;
        lastIndex = index;
      }
      if (lastIndex == -1) {
        // No new lines, append the new characters added.
        colCount += str.length();
      } else {
        // The column restarts after the last newline in the appended text.
        colCount = str.length() - (lastIndex + 1);
      }

      return this;
    }

    /** Returns all text in the text buffer. */
    @Override
    public String toString() {
      return sb.toString();
    }

    /** Returns the length of the text buffer. */
    public int getLength() {
      return sb.length();
    }

    /** Returns the (zero-based) index of the last line in the text buffer.
     */
    int getLineIndex() {
      return lineCount;
    }

    /** Returns the (zero-based) index of the last column in the text buffer. */
    int getColumnIndex() {
      return colCount;
    }

    /** Determines whether the text ends with the given suffix. */
    // NOTE(review): uses '>' rather than '>=', so a buffer exactly equal to the
    // suffix reports false — confirm this asymmetry is intentional.
    boolean endsWith(String suffix) {
      return (sb.length() > suffix.length())
          && suffix.equals(sb.substring(sb.length() - suffix.length()));
    }

    /** Adds a license and returns whether it is unique (has yet to be encountered). */
    boolean addLicense(String license) {
      return uniqueLicenses.add(license);
    }
  }

  //------------------------------------------------------------------------
  // Optimizations
  //------------------------------------------------------------------------

  // Runs the configured optimization passes over the ASTs via the phase optimizer.
  void performOptimizations() {
    checkState(options.shouldOptimize());

    List<PassFactory> optimizations = getPassConfig().getOptimizations();
    if (optimizations.isEmpty()) {
      return;
    }

    phaseOptimizer = createPhaseOptimizer();
    phaseOptimizer.consume(optimizations);
    phaseOptimizer.process(externsRoot, jsRoot);
    // Cleared so that hasScopeChanged() goes back to its conservative default.
    phaseOptimizer = null;
  }

  @Override
  void setCssRenamingMap(CssRenamingMap map) {
    options.cssRenamingMap = map;
  }

  @Override
  CssRenamingMap getCssRenamingMap() {
    return options.cssRenamingMap;
  }

  /** Control Flow Analysis.
   */
  ControlFlowGraph<Node> computeCFG() {
    logger.fine("Computing Control Flow Graph");
    Tracer tracer = newTracer("computeCFG");
    ControlFlowAnalysis cfa = new ControlFlowAnalysis(this, true, false);
    process(cfa);
    stopTracer(tracer, "computeCFG");
    return cfa.getCfg();
  }

  @Override
  void prepareAst(Node root) {
    CompilerPass pass = new PrepareAst(this);
    pass.process(null, root);
  }

  // Records per-function metadata (names etc.) into functionInformationMap.
  void recordFunctionInformation() {
    logger.fine("Recording function information");
    startPass("recordFunctionInformation");
    RecordFunctionInformation recordFunctionInfoPass =
        new RecordFunctionInformation(
            this, getPassConfig().getIntermediateState().functionNames);
    process(recordFunctionInfoPass);
    functionInformationMap = recordFunctionInfoPass.getMap();
    endPass("recordFunctionInformation");
  }

  protected final RecentChange recentChange = new RecentChange();
  private final List<CodeChangeHandler> codeChangeHandlers = new ArrayList<>();

  /** Name of the synthetic input that holds synthesized externs. */
  static final String SYNTHETIC_EXTERNS = "{SyntheticVarsDeclar}";

  /**
   * Name of the synthetic input that holds synthesized externs which
   * must be at the end of the externs AST.
   */
  static final String SYNTHETIC_EXTERNS_AT_END = "{SyntheticVarsAtEnd}";

  // Lazily created by getSynthesizedExternsInput() / getSynthesizedExternsInputAtEnd().
  private CompilerInput synthesizedExternsInput = null;
  private CompilerInput synthesizedExternsInputAtEnd = null;

  private ImmutableMap<String, Node> defaultDefineValues = ImmutableMap.of();

  @Override
  void addChangeHandler(CodeChangeHandler handler) {
    codeChangeHandlers.add(handler);
  }

  @Override
  void removeChangeHandler(CodeChangeHandler handler) {
    codeChangeHandlers.remove(handler);
  }

  Node getExternsRoot() {
    return externsRoot;
  }

  @Override
  Node getJsRoot() {
    return jsRoot;
  }

  /**
   * Some tests don't want to call the compiler "wholesale," they may not want
   * to call check and/or optimize. With this method, tests can execute custom
   * optimization loops.
*/ @VisibleForTesting void setPhaseOptimizer(PhaseOptimizer po) { this.phaseOptimizer = po; } @Override public int getChangeStamp() { return changeStamp; } @Override public void incrementChangeStamp() { changeStamp++; } @Override void setChangeScope(Node newChangeScopeRoot) { currentChangeScope = newChangeScopeRoot; } private Node getChangeScopeForNode(Node n) { /** * Compiler change reporting usually occurs after the AST change has already occurred. In the * case of node removals those nodes are already removed from the tree and so have no parent * chain to walk. In these situations changes are reported instead against what (used to be) * their parent. If that parent is itself a script node then it's important to be able to * recognize it as the enclosing scope without first stepping to its parent as well. */ if (n.isScript()) { return n; } n = NodeUtil.getEnclosingChangeScopeRoot(n.getParent()); if (n == null) { throw new IllegalStateException( "An enclosing scope is required for change reports but node " + n + " doesn't have one."); } return n; } private void recordChange(Node n) { n.setChangeTime(changeStamp); // Every code change happens at a different time changeStamp++; } @Override boolean hasScopeChanged(Node n) { if (phaseOptimizer == null) { return true; } return phaseOptimizer.hasScopeChanged(n); } /** * @deprecated * Use #reportChangeToEnclosingScope or NodeTraversal#reportCodeChange instead */ @Deprecated @Override public void reportCodeChange() { // TODO(johnlenz): if this is called with a null scope we need to invalidate everything // but this isn't done, so we need to make this illegal or record this as having // invalidated everything. 
if (currentChangeScope != null) { checkState(currentChangeScope.isScript() || currentChangeScope.isFunction()); recordChange(currentChangeScope); } notifyChangeHandlers(); } @Override public void reportChangeToChangeScope(Node changeScopeRoot) { checkState(changeScopeRoot.isScript() || changeScopeRoot.isFunction()); recordChange(changeScopeRoot); notifyChangeHandlers(); } @Override void reportChangeToEnclosingScope(Node n) { recordChange(getChangeScopeForNode(n)); notifyChangeHandlers(); } private void notifyChangeHandlers() { for (CodeChangeHandler handler : codeChangeHandlers) { handler.reportChange(); } } @Override public CodingConvention getCodingConvention() { CodingConvention convention = options.getCodingConvention(); convention = convention != null ? convention : defaultCodingConvention; return convention; } private Config.LanguageMode getParserConfigLanguageMode( CompilerOptions.LanguageMode languageMode) { switch (languageMode) { case ECMASCRIPT3: return Config.LanguageMode.ECMASCRIPT3; case ECMASCRIPT5: case ECMASCRIPT5_STRICT: return Config.LanguageMode.ECMASCRIPT5; case ECMASCRIPT6: case ECMASCRIPT6_STRICT: case ECMASCRIPT_2015: return Config.LanguageMode.ECMASCRIPT6; case ECMASCRIPT6_TYPED: return Config.LanguageMode.TYPESCRIPT; case ECMASCRIPT7: case ECMASCRIPT_2016: return Config.LanguageMode.ECMASCRIPT7; case ECMASCRIPT8: case ECMASCRIPT_2017: case ECMASCRIPT_NEXT: return Config.LanguageMode.ECMASCRIPT8; default: throw new IllegalStateException("Unexpected language mode: " + options.getLanguageIn()); } } @Override Config getParserConfig(ConfigContext context) { if (parserConfig == null || externsParserConfig == null) { synchronized (this) { if (parserConfig == null) { Config.LanguageMode configLanguageMode = getParserConfigLanguageMode( options.getLanguageIn()); Config.StrictMode strictMode = expectStrictModeInput() ? 
              Config.StrictMode.STRICT : Config.StrictMode.SLOPPY;
          parserConfig = createConfig(configLanguageMode, strictMode);
          // Externs must always be parsed with at least ES5 language mode.
          externsParserConfig =
              configLanguageMode.equals(Config.LanguageMode.ECMASCRIPT3)
                  ? createConfig(Config.LanguageMode.ECMASCRIPT5, strictMode)
                  : parserConfig;
        }
      }
    }
    switch (context) {
      case EXTERNS:
        return externsParserConfig;
      default:
        return parserConfig;
    }
  }

  // Builds a parser config from the current options for the given language and
  // strictness; overridable by subclasses.
  protected Config createConfig(Config.LanguageMode mode, Config.StrictMode strictMode) {
    Config config =
        ParserRunner.createConfig(
            mode,
            options.isParseJsDocDocumentation(),
            options.canContinueAfterErrors()
                ? Config.RunMode.KEEP_GOING
                : Config.RunMode.STOP_AFTER_ERROR,
            options.extraAnnotationNames,
            options.parseInlineSourceMaps,
            strictMode);
    return config;
  }

  //------------------------------------------------------------------------
  // Error reporting
  //------------------------------------------------------------------------

  /**
   * The warning classes that are available from the command-line, and
   * are suppressible by the {@code @suppress} annotation.
   */
  protected DiagnosticGroups getDiagnosticGroups() {
    return new DiagnosticGroups();
  }

  @Override
  public void report(JSError error) {
    CheckLevel level = error.getDefaultLevel();
    // The warnings guard may promote, demote, or leave the level unchanged (null).
    if (warningsGuard != null) {
      CheckLevel newLevel = warningsGuard.level(error);
      if (newLevel != null) {
        level = newLevel;
      }
    }

    if (level.isOn()) {
      initCompilerOptionsIfTesting();
      if (getOptions().errorHandler != null) {
        getOptions().errorHandler.report(level, error);
      }
      errorManager.report(level, error);
    }
  }

  @Override
  public void report(CheckLevel ignoredLevel, JSError error) {
    // The supplied level is intentionally ignored; the guard-resolved level wins.
    report(error);
  }

  @Override
  public CheckLevel getErrorLevel(JSError error) {
    checkNotNull(options);
    return warningsGuard.level(error);
  }

  /**
   * Report an internal error.
   */
  @Override
  void throwInternalError(String message, Exception cause) {
    String finalMessage =
        "INTERNAL COMPILER ERROR.\n" + "Please report this problem.\n\n" + message;

    RuntimeException e = new RuntimeException(finalMessage, cause);
    if (cause != null) {
      // Present the cause's stack trace as this exception's own, so reports
      // point directly at the original failure site.
      e.setStackTrace(cause.getStackTrace());
    }
    throw e;
  }

  /**
   * Gets the number of errors.
   */
  public int getErrorCount() {
    return errorManager.getErrorCount();
  }

  /**
   * Gets the number of warnings.
   */
  public int getWarningCount() {
    return errorManager.getWarningCount();
  }

  @Override
  boolean hasHaltingErrors() {
    return !getOptions().canContinueAfterErrors() && getErrorCount() > 0;
  }

  /**
   * Consults the {@link ErrorManager} to see if we've encountered errors
   * that should halt compilation. <p>
   *
   * If {@link CompilerOptions#canContinueAfterErrors} is {@code true}, this function
   * always returns {@code false} without consulting the error manager. The
   * error manager will continue to be told about new errors and warnings, but
   * the compiler will complete compilation of all inputs.<p>
   */
  public boolean hasErrors() {
    return hasHaltingErrors();
  }

  /** Called from the compiler passes, adds debug info */
  @Override
  void addToDebugLog(String str) {
    if (options.useDebugLog) {
      debugLog.append(str);
      debugLog.append('\n');
      logger.fine(str);
    }
  }

  @Override
  SourceFile getSourceFileByName(String sourceName) {
    // Here we assume that the source name is the input name, this
    // is true of JavaScript parsed from source.
    if (sourceName != null) {
      CompilerInput input = inputsById.get(new InputId(sourceName));
      if (input != null) {
        return input.getSourceFile();
      }
      // Alternatively, the sourceName might have been reverse-mapped by
      // an input source-map, so let's look in our sourcemap original sources.
      return sourceMapOriginalSources.get(sourceName);
    }

    return null;
  }

  // Returns the text of the named source file, or null when it cannot be read.
  public CharSequence getSourceFileContentByName(String sourceName) {
    SourceFile file = getSourceFileByName(sourceName);
    checkNotNull(file);
    try {
      return file.getCode();
    } catch (IOException e) {
      return null;
    }
  }

  @Override
  public void addInputSourceMap(String sourceFileName, SourceMapInput inputSourceMap) {
    inputSourceMaps.put(sourceFileName, inputSourceMap);
  }

  @Override
  public OriginalMapping getSourceMapping(String sourceName, int lineNumber, int columnNumber) {
    if (sourceName == null) {
      return null;
    }
    SourceMapInput sourceMap = inputSourceMaps.get(sourceName);
    if (sourceMap == null) {
      return null;
    }

    // JSCompiler uses 1-indexing for lineNumber and 0-indexing for
    // columnNumber.
    // SourceMap uses 1-indexing for both.
    OriginalMapping result =
        sourceMap.getSourceMap().getMappingForLine(lineNumber, columnNumber + 1);
    if (result == null) {
      return null;
    }

    // The sourcemap will return a path relative to the sourcemap's file.
    // Translate it to one relative to our base directory.
    String path =
        getRelativeTo(result.getOriginalFile(), sourceMap.getOriginalPath());
    sourceMapOriginalSources.putIfAbsent(path, originalSourcesLoader.loadSource(path));
    return result.toBuilder()
        .setOriginalFile(path)
        // Convert the source map's 1-based column back to JSCompiler's 0-based column.
        .setColumnPosition(result.getColumnPosition() - 1)
        .build();
  }

  @Override
  public String getSourceLine(String sourceName, int lineNumber) {
    // Line numbers are 1-based; reject out-of-range requests up front.
    if (lineNumber < 1) {
      return null;
    }
    SourceFile input = getSourceFileByName(sourceName);
    if (input != null) {
      return input.getLine(lineNumber);
    }
    return null;
  }

  @Override
  public Region getSourceRegion(String sourceName, int lineNumber) {
    if (lineNumber < 1) {
      return null;
    }
    SourceFile input = getSourceFileByName(sourceName);
    if (input != null) {
      return input.getRegion(lineNumber);
    }
    return null;
  }

  //------------------------------------------------------------------------
  // Package-private helpers
  //------------------------------------------------------------------------

  @Override
  Node getNodeForCodeInsertion(JSModule module) {
    // null module means "insert at the very front of the compilation".
    if (module == null) {
      if (inputs.isEmpty()) {
        throw new IllegalStateException("No inputs");
      }
      return inputs.get(0).getAstRoot(this);
    }

    List<CompilerInput> moduleInputs = module.getInputs();
    if (!moduleInputs.isEmpty()) {
      return moduleInputs.get(0).getAstRoot(this);
    }
    throw new IllegalStateException("Root module has no inputs");
  }

  public SourceMap getSourceMap() {
    return sourceMap;
  }

  VariableMap getVariableMap() {
    return getPassConfig().getIntermediateState().variableMap;
  }

  VariableMap getPropertyMap() {
    return getPassConfig().getIntermediateState().propertyMap;
  }

  VariableMap getStringMap() {
    return getPassConfig().getIntermediateState().stringMap;
  }

  @Override
  CompilerOptions getOptions() {
    return options;
  }

  FunctionInformationMap getFunctionalInformationMap() {
    return functionInformationMap;
  }

  /**
   * Sets the logging level for the com.google.javascript.jscomp package.
   */
  public static void setLoggingLevel(Level level) {
    logger.setLevel(level);
  }

  /** Gets the DOT graph of the AST generated at the end of compilation. */
  public String getAstDotGraph() throws IOException {
    if (jsRoot != null) {
      ControlFlowAnalysis cfa = new ControlFlowAnalysis(this, true, false);
      cfa.process(null, jsRoot);
      return DotFormatter.toDot(jsRoot, cfa.getCfg());
    } else {
      return "";
    }
  }

  @Override
  public ErrorManager getErrorManager() {
    if (options == null) {
      initOptions(new CompilerOptions());
    }
    return errorManager;
  }

  @Override
  List<CompilerInput> getInputsInOrder() {
    return Collections.unmodifiableList(inputs);
  }

  /**
   * Returns an unmodifiable view of the compiler inputs indexed by id.
   */
  public Map<InputId, CompilerInput> getInputsById() {
    return Collections.unmodifiableMap(inputsById);
  }

  /**
   * Gets the externs in the order in which they are being processed.
   */
  List<CompilerInput> getExternsInOrder() {
    return Collections.unmodifiableList(externs);
  }

  @VisibleForTesting
  List<CompilerInput> getInputsForTesting() {
    return inputs;
  }

  @VisibleForTesting
  List<CompilerInput> getExternsForTesting() {
    return externs;
  }

  @Override
  boolean hasRegExpGlobalReferences() {
    return hasRegExpGlobalReferences;
  }

  @Override
  void setHasRegExpGlobalReferences(boolean references) {
    hasRegExpGlobalReferences = references;
  }

  @Override
  void updateGlobalVarReferences(
      Map<Var, ReferenceCollection> refMapPatch, Node collectionRoot) {
    checkState(collectionRoot.isScript() || collectionRoot.isRoot());
    // The reference map is built lazily on first update.
    if (globalRefMap == null) {
      globalRefMap = new GlobalVarReferenceMap(getInputsInOrder(), getExternsInOrder());
    }
    globalRefMap.updateGlobalVarReferences(refMapPatch, collectionRoot);
  }

  @Override
  GlobalVarReferenceMap getGlobalVarReferences() {
    return globalRefMap;
  }

  @Override
  CompilerInput getSynthesizedExternsInput() {
    // Lazily created on first use.
    if (synthesizedExternsInput == null) {
      synthesizedExternsInput = newExternInput(SYNTHETIC_EXTERNS, SyntheticExternsPosition.START);
    }
    return synthesizedExternsInput;
  }

  @Override
  CompilerInput getSynthesizedExternsInputAtEnd() {
    // Lazily created on first use.
    if (synthesizedExternsInputAtEnd == null) {
      synthesizedExternsInputAtEnd =
          newExternInput(SYNTHETIC_EXTERNS_AT_END, SyntheticExternsPosition.END);
    }
    return synthesizedExternsInputAtEnd;
  }

  @Override
  public double getProgress() {
    return progress;
  }

  @Override
  String getLastPassName() {
    return lastPassName;
  }

  @Override
  void setProgress(double newProgress, String passName) {
    this.lastPassName = passName;
    // Clamp to 1.0 so callers can never observe more than 100% progress.
    if (newProgress > 1.0) {
      progress = 1.0;
    } else {
      progress = newProgress;
    }
  }

  @Override
  void setExternProperties(Set<String> externProperties) {
    this.externProperties = externProperties;
  }

  @Override
  Set<String> getExternProperties() {
    return externProperties;
  }

  /**
   * Replaces one file in a hot-swap mode. The given JsAst should be made
   * from a new version of a file that already was present in the last compile
   * call. If the file is new, this will be silently ignored.
   *
   * @param ast the ast of the file that is being replaced
   */
  public void replaceScript(JsAst ast) {
    CompilerInput input = this.getInput(ast.getInputId());
    if (!replaceIncrementalSourceAst(ast)) {
      return;
    }
    Node originalRoot = input.getAstRoot(this);

    processNewScript(ast, originalRoot);
  }

  /**
   * Adds a new Script AST to the compile state. If a script for the same file
   * already exists the script will not be added, instead a call to
   * #replaceScript should be used.
   *
   * @param ast the ast of the new file
   */
  public void addNewScript(JsAst ast) {
    if (!addNewSourceAst(ast)) {
      return;
    }
    // Hot-swap against an empty placeholder script, since there is no previous
    // version of this file to replace.
    Node emptyScript = new Node(Token.SCRIPT);
    InputId inputId = ast.getInputId();
    emptyScript.setInputId(inputId);
    emptyScript.setStaticSourceFile(
        SourceFile.fromCode(inputId.getIdName(), ""));

    processNewScript(ast, emptyScript);
  }

  // Runs cleanup passes, clears per-script state, then runs the default hot-swap
  // passes for the replaced/added script.
  private void processNewScript(JsAst ast, Node originalRoot) {
    languageMode = options.getLanguageIn();

    Node js = ast.getAstRoot(this);
    checkNotNull(js);

    runHotSwap(originalRoot, js, this.getCleanupPassConfig());
    // NOTE: If hot swap passes that use GlobalNamespace are added, we will need
    // to revisit this approach to clearing GlobalNamespaces
    runHotSwapPass(null, null, ensureDefaultPassConfig().garbageCollectChecks);

    this.getTypeRegistry().clearNamedTypes();
    this.removeSyntheticVarsInput();

    runHotSwap(originalRoot, js, this.ensureDefaultPassConfig());
  }

  /**
   * Execute the passes from a PassConfig instance over a single replaced file.
   */
  private void runHotSwap(
      Node originalRoot, Node js, PassConfig passConfig) {
    for (PassFactory passFactory : passConfig.getChecks()) {
      runHotSwapPass(originalRoot, js, passFactory);
    }
  }

  // Runs a single pass in hot-swap mode; passes with no hot-swap variant are skipped.
  private void runHotSwapPass(
      Node originalRoot, Node js, PassFactory passFactory) {
    HotSwapCompilerPass pass = passFactory.getHotSwapPass(this);
    if (pass != null) {
      if (logger.isLoggable(Level.INFO)) {
        logger.info("Performing HotSwap for pass " + passFactory.getName());
      }
      pass.hotSwapScript(js, originalRoot);
    }
  }

  private PassConfig getCleanupPassConfig() {
    return new CleanupPasses(getOptions());
  }

  private void removeSyntheticVarsInput() {
    String sourceName = Compiler.SYNTHETIC_EXTERNS;
    removeExternInput(new InputId(sourceName));
  }

  @Override
  Node ensureLibraryInjected(String resourceName, boolean force) {
    boolean doNotInject =
        !force && (options.skipNonTranspilationPasses || options.preventLibraryInjection);
    // Already injected, or injection disabled: return the last injection point unchanged.
    if (injectedLibraries.containsKey(resourceName) || doNotInject) {
      return lastInjectedLibrary;
    }

    //
Load/parse the code. String originalCode = ResourceLoader.loadTextResource( Compiler.class, "js/" + resourceName + ".js"); Node ast = parseSyntheticCode(" [synthetic:" + resourceName + "] ", originalCode); // Look for string literals of the form 'require foo bar' or 'externs baz' or 'normalize'. // As we process each one, remove it from its parent. for (Node node = ast.getFirstChild(); node != null && node.isExprResult() && node.getFirstChild().isString(); node = ast.getFirstChild()) { String directive = node.getFirstChild().getString(); List<String> words = Splitter.on(' ').limit(2).splitToList(directive); switch (words.get(0)) { case "use": // 'use strict' is ignored (and deleted). break; case "require": // 'require lib'; pulls in the named library before this one. ensureLibraryInjected(words.get(1), force); break; case "declare": // 'declare name'; adds the name to the externs (with no type information). // Note that we could simply add the entire externs library, but that leads to // potentially-surprising behavior when the externs that are present depend on // whether or not a polyfill is used. Node var = IR.var(IR.name(words.get(1))); JSDocInfoBuilder jsdoc = new JSDocInfoBuilder(false); // Suppress duplicate-var warning in case this name is already defined in the externs. jsdoc.addSuppression("duplicate"); var.setJSDocInfo(jsdoc.build()); getSynthesizedExternsInputAtEnd() .getAstRoot(this) .addChildToBack(var); break; default: throw new RuntimeException("Bad directive: " + directive); } ast.removeChild(node); } // If we've already started optimizations, then we need to normalize this. if (getLifeCycleStage().isNormalized()) { Normalize.normalizeSyntheticCode(this, ast, "jscomp_" + resourceName + "_"); } // Insert the code immediately after the last-inserted runtime library. 
Node lastChild = ast.getLastChild(); for (Node child = ast.getFirstChild(); child != null; child = child.getNext()) { NodeUtil.markNewScopesChanged(child, this); } Node firstChild = ast.removeChildren(); if (firstChild == null) { // Handle require-only libraries. return lastInjectedLibrary; } Node parent = getNodeForCodeInsertion(null); if (lastInjectedLibrary == null) { parent.addChildrenToFront(firstChild); } else { parent.addChildrenAfter(firstChild, lastInjectedLibrary); } lastInjectedLibrary = lastChild; injectedLibraries.put(resourceName, lastChild); reportChangeToEnclosingScope(parent); return lastChild; } /** Returns the compiler version baked into the jar. */ @GwtIncompatible("java.util.ResourceBundle") public static String getReleaseVersion() { ResourceBundle config = ResourceBundle.getBundle(CONFIG_RESOURCE); return config.getString("compiler.version"); } /** Returns the compiler date baked into the jar. */ @GwtIncompatible("java.util.ResourceBundle") public static String getReleaseDate() { ResourceBundle config = ResourceBundle.getBundle(CONFIG_RESOURCE); return config.getString("compiler.date"); } @Override void addComments(String filename, List<Comment> comments) { if (!getOptions().preservesDetailedSourceInfo()) { throw new UnsupportedOperationException( "addComments may only be called in IDE mode."); } commentsPerFile.put(filename, comments); } @Override public List<Comment> getComments(String filename) { if (!getOptions().preservesDetailedSourceInfo()) { throw new UnsupportedOperationException( "getComments may only be called in IDE mode."); } return commentsPerFile.get(filename); } @Override void setDefaultDefineValues(ImmutableMap<String, Node> values) { this.defaultDefineValues = values; } @Override ImmutableMap<String, Node> getDefaultDefineValues() { return this.defaultDefineValues; } @Override ModuleLoader getModuleLoader() { return moduleLoader; } private void addFilesToSourceMap(Iterable<? 
extends SourceFile> files) { if (getOptions().sourceMapIncludeSourcesContent && getSourceMap() != null) { for (SourceFile file : files) { getSourceMap().addSourceFile(file); } } } private void addFileToSourceMap(String filename, String contents) { if (getOptions().sourceMapIncludeSourcesContent && getSourceMap() != null) { getSourceMap().addSourceFile(SourceFile.fromCode(filename, contents)); } } /** * Serializable state of the compiler. */ private static class CompilerState implements Serializable { CompilerOptions options; Node externsRoot; Node jsRoot; Node externAndJsRoot; List<CompilerInput> externs; List<CompilerInput> inputs; Map<InputId, CompilerInput> inputsById; JSTypeRegistry typeRegistry; CompilerState( CompilerOptions options, Node externsRoot, Node jsRoot, Node externAndJsRoot, List<CompilerInput> externs, List<CompilerInput> inputs, Map<InputId, CompilerInput> inputsById, JSTypeRegistry typeRegistry) { this.options = options; this.externsRoot = externsRoot; this.jsRoot = jsRoot; this.externAndJsRoot = externAndJsRoot; this.typeRegistry = typeRegistry; this.externs = externs; this.inputs = inputs; this.inputsById = inputsById; } } @GwtIncompatible("ObjectOutputStream") public void saveState(OutputStream outputStream) throws IOException { CompilerState compilerState = new CompilerState( options, externsRoot, jsRoot, externAndJsRoot, externs, inputs, inputsById, typeRegistry); try (ObjectOutputStream objectOutputStream = new ObjectOutputStream(outputStream)) { objectOutputStream.writeObject(compilerState); } } @GwtIncompatible("ObjectInputStream") public void restoreState(InputStream inputStream) throws Exception { try (ObjectInputStream objectInputStream = new ObjectInputStream(inputStream)) { CompilerState compilerState = (CompilerState) objectInputStream.readObject(); options = compilerState.options; externs = compilerState.externs; inputs = compilerState.inputs; inputsById.clear(); inputsById.putAll(compilerState.inputsById); typeRegistry = 
compilerState.typeRegistry; externAndJsRoot = compilerState.externAndJsRoot; externsRoot = compilerState.externsRoot; jsRoot = compilerState.jsRoot; } initWarningsGuard(options.getWarningsGuard()); } }
jscomp/Compiler: remove unused addFileToSourceMap() ------------- Created by MOE: https://github.com/google/moe MOE_MIGRATED_REVID=155669395
src/com/google/javascript/jscomp/Compiler.java
jscomp/Compiler: remove unused addFileToSourceMap()
<ide><path>rc/com/google/javascript/jscomp/Compiler.java <ide> } <ide> } <ide> <del> private void addFileToSourceMap(String filename, String contents) { <del> if (getOptions().sourceMapIncludeSourcesContent && getSourceMap() != null) { <del> getSourceMap().addSourceFile(SourceFile.fromCode(filename, contents)); <del> } <del> } <del> <ide> /** <ide> * Serializable state of the compiler. <ide> */
Java
mit
7a816559bb73ebcb1832f32a2a5b41d8f707c633
0
aviolette/foodtrucklocator,aviolette/foodtrucklocator,aviolette/foodtrucklocator,aviolette/foodtrucklocator
package foodtruck.schedule; import java.io.IOException; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nullable; import com.google.api.services.calendar.Calendar; import com.google.api.services.calendar.model.Event; import com.google.api.services.calendar.model.Events; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.inject.Inject; import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.format.DateTimeFormatter; import foodtruck.dao.TruckDAO; import foodtruck.geolocation.GeoLocator; import foodtruck.geolocation.GeolocationGranularity; import foodtruck.model.Location; import foodtruck.model.StopOrigin; import foodtruck.model.Truck; import foodtruck.model.TruckStop; import foodtruck.util.Clock; import foodtruck.util.FriendlyDateOnlyFormat; /** * @author aviolette * @since 11/20/14 */ public class GoogleCalendarV3Consumer implements ScheduleStrategy { private static final Logger log = Logger.getLogger(GoogleCalendarV3Consumer.class.getName()); private final Calendar calendarClient; private final TruckDAO truckDAO; private final AddressExtractor addressExtractor; private final GeoLocator geoLocator; private final Clock clock; private final DateTimeFormatter formatter; @Inject public GoogleCalendarV3Consumer(AddressExtractor addressExtractor, Calendar calendarClient, TruckDAO truckDAO, GeoLocator geoLocator, Clock clock, @FriendlyDateOnlyFormat DateTimeFormatter formatter) { this.calendarClient = calendarClient; this.truckDAO = truckDAO; this.addressExtractor = addressExtractor; this.geoLocator = geoLocator; this.clock = clock; this.formatter = formatter; } @Override public List<TruckStop> findForTime(Interval range, @Nullable Truck searchTruck) { String truckId = searchTruck == null ? 
null : searchTruck.getId(); log.info("Initiating calendar search " + truckId); List<TruckStop> stops = Lists.newLinkedList(); if (searchTruck != null && !Strings.isNullOrEmpty(searchTruck.getCalendarUrl())) { customCalendarSearch(range, searchTruck, stops); } else if (searchTruck == null) { for (Truck truck : truckDAO.findTrucksWithCalendars()) { customCalendarSearch(range, truck, stops); } } return stops; } private void customCalendarSearch(Interval range, Truck truck, List<TruckStop> stops) { try { final String calendarUrl = truck.getCalendarUrl(); if (Strings.isNullOrEmpty(calendarUrl)) { return; } log.info("Custom calendar search: " + calendarUrl); stops.addAll(performTruckSearch(range, truck)); } catch (RuntimeException rte) { log.info("Search truck: " + truck.getId()); log.log(Level.SEVERE, rte.getMessage(), rte); } } private List<TruckStop> performTruckSearch(Interval range, Truck truck) { ImmutableList.Builder<TruckStop> builder = ImmutableList.builder(); try { final String calendarId = truck.getCalendarUrl(); String pageToken = null; int timezoneAdjustment = truck.getTimezoneAdjustment(); do { Calendar.Events.List query = calendarClient.events().list(calendarId).setSingleEvents(true).setTimeMin( toGoogleDateTime(range.getStart())).setTimeMax(toGoogleDateTime(range.getEnd())).setPageToken(pageToken); Events events = query.execute(); List<Event> items = events.getItems(); for (Event event : items) { final String titleText = event.getSummary(); if (!Strings.isNullOrEmpty(titleText)) { String lowerTitle = titleText.toLowerCase(); if (lowerTitle.contains("private") || lowerTitle.contains("catering") || lowerTitle.contains("downtown chicago") || titleText.contains("TBD") || titleText.contains("TBA")) { log.log(Level.INFO, "Skipping {0} for {1}", new Object[]{titleText, truck.getId()}); continue; } } String where = event.getLocation(); Location location = null; if (!Strings.isNullOrEmpty(where)) { if (where.endsWith(", United States")) { where = 
where.substring(0, where.lastIndexOf(",")); // Fixes how google calendar normalizes fully-qualified addresses with a state, zip and country code } else if (where.lastIndexOf(", IL ") != -1) { where = where.substring(0, where.lastIndexOf(", IL ")) + ", IL"; } // HACK Alert, the address extractor doesn't handle non-Chicago addresses well, so // if it is a fully qualified address written by me, it will probably end in City, IL if (!where.endsWith(", IL")) { where = coalesce(Iterables.getFirst(addressExtractor.parse(where, truck), null), where); } location = geoLocator.locate(where, GeolocationGranularity.NARROW); } if (location == null || !location.isResolved()) { // Sometimes the location is in the title - try that too if (!Strings.isNullOrEmpty(titleText)) { where = titleText; log.info("Trying title text: " + titleText); final List<String> parsed = addressExtractor.parse(titleText, truck); String locString = Iterables.getFirst(parsed, null); if (locString == null) { log.info("Failed to parse titletext for address, trying whole thing: " + titleText); locString = titleText; } if (locString != null) { location = geoLocator.locate(locString, GeolocationGranularity.NARROW); } } } if (location != null && location.isResolved() && !event.isEndTimeUnspecified()) { DateTime startTime, endTime; if (event.getStart().getDateTime() == null) { if (truck.getCategories().contains("AssumeNoTimeEqualsLunch")) { String dcs[] = event.getStart().getDate().toStringRfc3339().split("-"); startTime = new DateTime(Integer.parseInt(dcs[0]), Integer.parseInt(dcs[1]), Integer.parseInt(dcs[2]), 11, 0, clock.zone()); endTime = startTime.plusHours(2); } else { log.log(Level.WARNING, "Skipping {0} {1} because no time is specified", new Object[]{truck.getId(), location}); continue; } } else { startTime = new DateTime(event.getStart().getDateTime().getValue(), clock.zone()).plusHours(timezoneAdjustment); endTime = new DateTime(event.getEnd().getDateTime().getValue(), 
clock.zone()).plusHours(timezoneAdjustment); } String note = "Stop added from vendor's calendar"; Confidence confidence = Confidence.MEDIUM; final TruckStop truckStop = TruckStop.builder().truck(truck) .origin(StopOrigin.VENDORCAL) .location(location) .confidence(confidence) .appendNote(note) .startTime(startTime) .endTime(endTime) .build(); log.log(Level.INFO, "Loaded truckstop: {0}", truckStop); builder.add(truckStop); } else { log.log(Level.WARNING, "Location could not be resolved for {0}, {1} between {2} and {3}. Link: {4}", new Object[] {truck.getId(), where, range.getStart(), range.getEnd(), event.getHtmlLink()}); } } pageToken = events.getNextPageToken(); } while (pageToken != null); } catch (IOException e) { log.log(Level.SEVERE, "An error occurred while caching the schedule", e); } return builder.build(); } private @Nullable String enteredOn(com.google.api.client.util.DateTime entry) { try { return formatter.print(new DateTime(entry.getValue(), clock.zone())); } catch (Exception e) { log.log(Level.WARNING, e.getMessage(), e); return clock.nowFormattedAsTime(); } } // TODO: make this generic and pull it out private String coalesce(String st1, String st2) { return (Strings.isNullOrEmpty(st1)) ? st2 : st1; } private com.google.api.client.util.DateTime toGoogleDateTime(DateTime start) { return new com.google.api.client.util.DateTime(start.getMillis()); } }
src/main/java/foodtruck/schedule/GoogleCalendarV3Consumer.java
package foodtruck.schedule; import java.io.IOException; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nullable; import com.google.api.services.calendar.Calendar; import com.google.api.services.calendar.model.Event; import com.google.api.services.calendar.model.Events; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.inject.Inject; import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.format.DateTimeFormatter; import foodtruck.dao.TruckDAO; import foodtruck.geolocation.GeoLocator; import foodtruck.geolocation.GeolocationGranularity; import foodtruck.model.Location; import foodtruck.model.StopOrigin; import foodtruck.model.Truck; import foodtruck.model.TruckStop; import foodtruck.util.Clock; import foodtruck.util.FriendlyDateOnlyFormat; /** * @author aviolette * @since 11/20/14 */ public class GoogleCalendarV3Consumer implements ScheduleStrategy { private static final Logger log = Logger.getLogger(GoogleCalendarV3Consumer.class.getName()); private final Calendar calendarClient; private final TruckDAO truckDAO; private final AddressExtractor addressExtractor; private final GeoLocator geoLocator; private final Clock clock; private final DateTimeFormatter formatter; @Inject public GoogleCalendarV3Consumer(AddressExtractor addressExtractor, Calendar calendarClient, TruckDAO truckDAO, GeoLocator geoLocator, Clock clock, @FriendlyDateOnlyFormat DateTimeFormatter formatter) { this.calendarClient = calendarClient; this.truckDAO = truckDAO; this.addressExtractor = addressExtractor; this.geoLocator = geoLocator; this.clock = clock; this.formatter = formatter; } @Override public List<TruckStop> findForTime(Interval range, @Nullable Truck searchTruck) { String truckId = searchTruck == null ? 
null : searchTruck.getId(); log.info("Initiating calendar search " + truckId); List<TruckStop> stops = Lists.newLinkedList(); if (searchTruck != null && !Strings.isNullOrEmpty(searchTruck.getCalendarUrl())) { customCalendarSearch(range, searchTruck, stops); } else if (searchTruck == null) { for (Truck truck : truckDAO.findTrucksWithCalendars()) { customCalendarSearch(range, truck, stops); } } return stops; } private void customCalendarSearch(Interval range, Truck truck, List<TruckStop> stops) { try { final String calendarUrl = truck.getCalendarUrl(); if (Strings.isNullOrEmpty(calendarUrl)) { return; } log.info("Custom calendar search: " + calendarUrl); stops.addAll(performTruckSearch(range, truck)); } catch (RuntimeException rte) { log.info("Search truck: " + truck.getId()); log.log(Level.SEVERE, rte.getMessage(), rte); } } private List<TruckStop> performTruckSearch(Interval range, Truck truck) { ImmutableList.Builder<TruckStop> builder = ImmutableList.builder(); try { final String calendarId = truck.getCalendarUrl(); String pageToken = null; int timezoneAdjustment = truck.getTimezoneAdjustment(); do { Calendar.Events.List query = calendarClient.events().list(calendarId).setSingleEvents(true).setTimeMin( toGoogleDateTime(range.getStart())).setTimeMax(toGoogleDateTime(range.getEnd())).setPageToken(pageToken); Events events = query.execute(); List<Event> items = events.getItems(); for (Event event : items) { final String titleText = event.getSummary(); if (!Strings.isNullOrEmpty(titleText)) { String lowerTitle = titleText.toLowerCase(); if (lowerTitle.contains("private") || lowerTitle.contains("catering") || titleText.contains("TBD") || titleText.contains("TBA")) { log.log(Level.INFO, "Skipping {0} for {1}", new Object[]{titleText, truck.getId()}); continue; } } String where = event.getLocation(); Location location = null; if (!Strings.isNullOrEmpty(where)) { if (where.endsWith(", United States")) { where = where.substring(0, where.lastIndexOf(",")); // Fixes how 
google calendar normalizes fully-qualified addresses with a state, zip and country code } else if (where.lastIndexOf(", IL ") != -1) { where = where.substring(0, where.lastIndexOf(", IL ")) + ", IL"; } // HACK Alert, the address extractor doesn't handle non-Chicago addresses well, so // if it is a fully qualified address written by me, it will probably end in City, IL if (!where.endsWith(", IL")) { where = coalesce(Iterables.getFirst(addressExtractor.parse(where, truck), null), where); } location = geoLocator.locate(where, GeolocationGranularity.NARROW); } if (location == null || !location.isResolved()) { // Sometimes the location is in the title - try that too if (!Strings.isNullOrEmpty(titleText)) { where = titleText; log.info("Trying title text: " + titleText); final List<String> parsed = addressExtractor.parse(titleText, truck); String locString = Iterables.getFirst(parsed, null); if (locString == null) { log.info("Failed to parse titletext for address, trying whole thing: " + titleText); locString = titleText; } if (locString != null) { location = geoLocator.locate(locString, GeolocationGranularity.NARROW); } } } if (location != null && location.isResolved() && !event.isEndTimeUnspecified()) { DateTime startTime, endTime; if (event.getStart().getDateTime() == null) { if (truck.getCategories().contains("AssumeNoTimeEqualsLunch")) { String dcs[] = event.getStart().getDate().toStringRfc3339().split("-"); startTime = new DateTime(Integer.parseInt(dcs[0]), Integer.parseInt(dcs[1]), Integer.parseInt(dcs[2]), 11, 0, clock.zone()); endTime = startTime.plusHours(2); } else { log.log(Level.WARNING, "Skipping {0} {1} because no time is specified", new Object[]{truck.getId(), location}); continue; } } else { startTime = new DateTime(event.getStart().getDateTime().getValue(), clock.zone()).plusHours(timezoneAdjustment); endTime = new DateTime(event.getEnd().getDateTime().getValue(), clock.zone()).plusHours(timezoneAdjustment); } String note = "Stop added from vendor's 
calendar"; Confidence confidence = Confidence.MEDIUM; final TruckStop truckStop = TruckStop.builder().truck(truck) .origin(StopOrigin.VENDORCAL) .location(location) .confidence(confidence) .appendNote(note) .startTime(startTime) .endTime(endTime) .build(); log.log(Level.INFO, "Loaded truckstop: {0}", truckStop); builder.add(truckStop); } else { log.log(Level.WARNING, "Location could not be resolved for {0}, {1} between {2} and {3}. Link: {4}", new Object[] {truck.getId(), where, range.getStart(), range.getEnd(), event.getHtmlLink()}); } } pageToken = events.getNextPageToken(); } while (pageToken != null); } catch (IOException e) { log.log(Level.SEVERE, "An error occurred while caching the schedule", e); } return builder.build(); } private @Nullable String enteredOn(com.google.api.client.util.DateTime entry) { try { return formatter.print(new DateTime(entry.getValue(), clock.zone())); } catch (Exception e) { log.log(Level.WARNING, e.getMessage(), e); return clock.nowFormattedAsTime(); } } // TODO: make this generic and pull it out private String coalesce(String st1, String st2) { return (Strings.isNullOrEmpty(st1)) ? st2 : st1; } private com.google.api.client.util.DateTime toGoogleDateTime(DateTime start) { return new com.google.api.client.util.DateTime(start.getMillis()); } }
Added downtown chicago as a skip term
src/main/java/foodtruck/schedule/GoogleCalendarV3Consumer.java
Added downtown chicago as a skip term
<ide><path>rc/main/java/foodtruck/schedule/GoogleCalendarV3Consumer.java <ide> final String titleText = event.getSummary(); <ide> if (!Strings.isNullOrEmpty(titleText)) { <ide> String lowerTitle = titleText.toLowerCase(); <del> if (lowerTitle.contains("private") || lowerTitle.contains("catering") || titleText.contains("TBD") || titleText.contains("TBA")) { <add> if (lowerTitle.contains("private") || lowerTitle.contains("catering") || lowerTitle.contains("downtown chicago") || titleText.contains("TBD") || titleText.contains("TBA")) { <ide> log.log(Level.INFO, "Skipping {0} for {1}", new Object[]{titleText, truck.getId()}); <ide> continue; <ide> }
Java
mit
27d822dfa859df6aba422fd25ebc2d0b58bbaa02
0
r0n9/demo-springboot
package vip.fanrong; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import springfox.documentation.builders.ApiInfoBuilder; import springfox.documentation.builders.PathSelectors; import springfox.documentation.builders.RequestHandlerSelectors; import springfox.documentation.service.ApiInfo; import springfox.documentation.spi.DocumentationType; import springfox.documentation.spring.web.plugins.Docket; import springfox.documentation.swagger2.annotations.EnableSwagger2; /** * Created by Rong on 2017/7/13. */ @Configuration @EnableSwagger2 public class Swagger2 { @Bean public Docket createRestApi() { return new Docket(DocumentationType.SWAGGER_2) .apiInfo(apiInfo()) .select() .apis(RequestHandlerSelectors.basePackage("vip.fanrong")) .paths(PathSelectors.any()).build(); } private ApiInfo apiInfo() { return new ApiInfoBuilder() .title("Spring Boot中使用Swagger2构建RESTful APIs") .description("初次尝试,后面会逐渐增加一些有意思的API。") .termsOfServiceUrl("") .contact("[email protected]") .version("1.0") .license("") .build(); } }
src/main/java/vip/fanrong/Swagger2.java
package vip.fanrong; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import springfox.documentation.builders.ApiInfoBuilder; import springfox.documentation.builders.PathSelectors; import springfox.documentation.builders.RequestHandlerSelectors; import springfox.documentation.service.ApiInfo; import springfox.documentation.spi.DocumentationType; import springfox.documentation.spring.web.plugins.Docket; import springfox.documentation.swagger2.annotations.EnableSwagger2; /** * Created by Rong on 2017/7/13. */ @Configuration @EnableSwagger2 public class Swagger2 { @Bean public Docket createRestApi() { return new Docket(DocumentationType.SWAGGER_2) .apiInfo(apiInfo()) .select() .apis(RequestHandlerSelectors.basePackage("vip.fanrong")) .paths(PathSelectors.any()).build(); } private ApiInfo apiInfo() { return new ApiInfoBuilder() .title("Spring Boot中使用Swagger2构建RESTful APIs") .description("初次尝试,后面会逐渐增加一些有意思的API。") .termsOfServiceUrl("") .contact("[email protected]") .version("1.0") .build(); } }
build license for API info
src/main/java/vip/fanrong/Swagger2.java
build license for API info
<ide><path>rc/main/java/vip/fanrong/Swagger2.java <ide> .termsOfServiceUrl("") <ide> .contact("[email protected]") <ide> .version("1.0") <add> .license("") <ide> .build(); <ide> } <ide>
Java
agpl-3.0
3a4f0ff44d8b6a0f8dc207830e24f17fec209991
0
kelvinmbwilo/vims,USAID-DELIVER-PROJECT/elmis,vimsvarcode/elmis,kelvinmbwilo/vims,vimsvarcode/elmis,vimsvarcode/elmis,kelvinmbwilo/vims,USAID-DELIVER-PROJECT/elmis,OpenLMIS/open-lmis,OpenLMIS/open-lmis,vimsvarcode/elmis,USAID-DELIVER-PROJECT/elmis,OpenLMIS/open-lmis,vimsvarcode/elmis,USAID-DELIVER-PROJECT/elmis,kelvinmbwilo/vims,OpenLMIS/open-lmis
package org.openlmis.email.repository.mapper; import org.apache.ibatis.annotations.Insert; import org.springframework.mail.SimpleMailMessage; import org.springframework.stereotype.Repository; @Repository public interface EmailNotificationMapper { @Insert("INSERT INTO email_notifications(receiver, subject, content, sent) VALUES (#{to}, #{subject}, #{text}, false)") Integer insert(SimpleMailMessage message); }
modules/email/src/main/java/org/openlmis/email/repository/mapper/EmailNotificationMapper.java
package org.openlmis.email.repository.mapper; import org.apache.ibatis.annotations.Insert; import org.springframework.mail.SimpleMailMessage; import org.springframework.stereotype.Repository; @Repository public interface EmailNotificationMapper { @Insert("INSERT INTO email_notifications(receiver, subject, content, sent) VALUES (#{receiver}, #{subject}, #{text}, false)") Integer insert(SimpleMailMessage message); }
minor change to insert the "to" field instead of the receiver field.
modules/email/src/main/java/org/openlmis/email/repository/mapper/EmailNotificationMapper.java
minor change to insert the "to" field instead of the receiver field.
<ide><path>odules/email/src/main/java/org/openlmis/email/repository/mapper/EmailNotificationMapper.java <ide> @Repository <ide> public interface EmailNotificationMapper { <ide> <del> @Insert("INSERT INTO email_notifications(receiver, subject, content, sent) VALUES (#{receiver}, #{subject}, #{text}, false)") <add> @Insert("INSERT INTO email_notifications(receiver, subject, content, sent) VALUES (#{to}, #{subject}, #{text}, false)") <ide> Integer insert(SimpleMailMessage message); <ide> }
Java
apache-2.0
d2fc24e7920ffaae7eaaf6fd532121b78ec9cc78
0
i2p/i2p.itoopie,i2p/i2p.itoopie,i2p/i2p.itoopie,i2p/i2p.itoopie
/* I2PTunnel is GPL'ed (with the exception mentioned in I2PTunnel.java) * (c) 2003 - 2004 mihi */ package net.i2p.i2ptunnel; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.Socket; import java.net.SocketException; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.StringTokenizer; import net.i2p.I2PAppContext; import net.i2p.I2PException; import net.i2p.client.streaming.I2PSocket; import net.i2p.data.DataFormatException; import net.i2p.data.Destination; import net.i2p.util.Clock; import net.i2p.util.EventDispatcher; import net.i2p.util.I2PThread; import net.i2p.util.Log; /** * Act as a mini HTTP proxy, handling various different types of requests, * forwarding them through I2P appropriately, and displaying the reply. Supported * request formats are: <pre> * $method http://$site[$port]/$path $protocolVersion * or * $method $path $protocolVersion\nHost: $site * or * $method http://i2p/$site/$path $protocolVersion * or * $method /$site/$path $protocolVersion * </pre> * * If the $site resolves with the I2P naming service, then it is directed towards * that eepsite, otherwise it is directed towards this client's outproxy (typically * "squid.i2p"). Only HTTP is supported (no HTTPS, ftp, mailto, etc). Both GET * and POST have been tested, though other $methods should work. 
* */ public class I2PTunnelHTTPClient extends I2PTunnelClientBase implements Runnable { private static final Log _log = new Log(I2PTunnelHTTPClient.class); private List proxyList; private final static byte[] ERR_REQUEST_DENIED = ("HTTP/1.1 403 Access Denied\r\n"+ "Content-Type: text/html; charset=iso-8859-1\r\n"+ "Cache-control: no-cache\r\n"+ "\r\n"+ "<html><body><H1>I2P ERROR: REQUEST DENIED</H1>"+ "You attempted to connect to a non-I2P website or location.<BR>") .getBytes(); private final static byte[] ERR_DESTINATION_UNKNOWN = ("HTTP/1.1 503 Service Unavailable\r\n"+ "Content-Type: text/html; charset=iso-8859-1\r\n"+ "Cache-control: no-cache\r\n"+ "\r\n"+ "<html><body><H1>I2P ERROR: DESTINATION NOT FOUND</H1>"+ "That I2P Destination was not found. Perhaps you pasted in the "+ "wrong BASE64 I2P Destination or the link you are following is "+ "bad. The host (or the WWW proxy, if you're using one) could also "+ "be temporarily offline. You may want to <b>retry</b>. "+ "Could not find the following Destination:<BR><BR>") .getBytes(); private final static byte[] ERR_TIMEOUT = ("HTTP/1.1 504 Gateway Timeout\r\n"+ "Content-Type: text/html; charset=iso-8859-1\r\n"+ "Cache-control: no-cache\r\n\r\n"+ "<html><body><H1>I2P ERROR: TIMEOUT</H1>"+ "That Destination was reachable, but timed out getting a "+ "response. This is likely a temporary error, so you should simply "+ "try to refresh, though if the problem persists, the remote "+ "destination may have issues. Could not get a response from "+ "the following Destination:<BR><BR>") .getBytes(); private final static byte[] ERR_NO_OUTPROXY = ("HTTP/1.1 503 Service Unavailable\r\n"+ "Content-Type: text/html; charset=iso-8859-1\r\n"+ "Cache-control: no-cache\r\n"+ "\r\n"+ "<html><body><H1>I2P ERROR: No outproxy found</H1>"+ "Your request was for a site outside of I2P, but you have no "+ "HTTP outproxy configured. Please configure an outproxy in I2PTunnel") .getBytes(); /** used to assign unique IDs to the threads / clients. 
no logic or functionality */ private static volatile long __clientId = 0; /** * @throws IllegalArgumentException if the I2PTunnel does not contain * valid config to contact the router */ public I2PTunnelHTTPClient(int localPort, Logging l, boolean ownDest, String wwwProxy, EventDispatcher notifyThis, I2PTunnel tunnel) throws IllegalArgumentException { super(localPort, ownDest, l, notifyThis, "HTTPHandler " + (++__clientId), tunnel); if (waitEventValue("openBaseClientResult").equals("error")) { notifyEvent("openHTTPClientResult", "error"); return; } proxyList = new ArrayList(); if (wwwProxy != null) { StringTokenizer tok = new StringTokenizer(wwwProxy, ","); while (tok.hasMoreTokens()) proxyList.add(tok.nextToken().trim()); } setName(getLocalPort() + " -> HTTPClient [WWW outproxy list: " + wwwProxy + "]"); startRunning(); notifyEvent("openHTTPClientResult", "ok"); } private String getPrefix(long requestId) { return "Client[" + _clientId + "/" + requestId + "]: "; } private String selectProxy() { synchronized (proxyList) { int size = proxyList.size(); if (size <= 0) { if (_log.shouldLog(Log.INFO)) _log.info("Proxy list is empty - no outproxy available"); l.log("Proxy list is emtpy - no outproxy available"); return null; } int index = I2PAppContext.getGlobalContext().random().nextInt(size); if (index >= size) index = size - 1; if (index < 0) return null; String proxy = (String)proxyList.get(index); return proxy; } } private static long __requestId = 0; protected void clientConnectionRun(Socket s) { OutputStream out = null; String targetRequest = null; boolean usingWWWProxy = false; String currentProxy = null; InactivityTimeoutThread timeoutThread = null; long requestId = ++__requestId; try { out = s.getOutputStream(); BufferedReader br = new BufferedReader(new InputStreamReader(s.getInputStream(), "ISO-8859-1")); String line, method = null, protocol = null, host = null, destination = null; StringBuffer newRequest = new StringBuffer(); while ((line = br.readLine()) != 
null) { if (_log.shouldLog(Log.DEBUG)) _log.debug(getPrefix(requestId) + "Line=[" + line + "]"); if (line.startsWith("Connection: ") || line.startsWith("Keep-Alive: ") || line.startsWith("Proxy-Connection: ")) continue; if (method == null) { // first line (GET /base64/realaddr) if (_log.shouldLog(Log.DEBUG)) _log.debug(getPrefix(requestId) + "Method is null for [" + line + "]"); int pos = line.indexOf(" "); if (pos == -1) break; method = line.substring(0, pos); String request = line.substring(pos + 1); if (request.startsWith("/") && getTunnel().getClientOptions().getProperty("i2ptunnel.noproxy") != null) { request = "http://i2p" + request; } pos = request.indexOf("//"); if (pos == -1) { method = null; break; } protocol = request.substring(0, pos + 2); request = request.substring(pos + 2); targetRequest = request; pos = request.indexOf("/"); if (pos == -1) { method = null; break; } host = request.substring(0, pos); // Quick hack for foo.bar.i2p if (host.toLowerCase().endsWith(".i2p")) { destination = host; host = getHostName(destination); line = method + " " + request.substring(pos); } else if (host.indexOf(".") != -1) { // The request must be forwarded to a WWW proxy if (_log.shouldLog(Log.DEBUG)) _log.debug("Before selecting outproxy for " + host); currentProxy = selectProxy(); if (_log.shouldLog(Log.DEBUG)) _log.debug("After selecting outproxy for " + host + ": " + currentProxy); if (currentProxy == null) { if (_log.shouldLog(Log.WARN)) _log.warn(getPrefix(requestId) + "Host wants to be outproxied, but we dont have any!"); l.log("No HTTP outproxy found for the request."); if (out != null) { out.write(ERR_NO_OUTPROXY); out.write("<p /><i>Generated on: ".getBytes()); out.write(new Date().toString().getBytes()); out.write("</i></body></html>\n".getBytes()); out.flush(); } s.close(); return; } destination = currentProxy; usingWWWProxy = true; if (_log.shouldLog(Log.DEBUG)) _log.debug(getPrefix(requestId) + "Host doesnt end with .i2p and it contains a period [" + host 
+ "]: wwwProxy!"); } else { request = request.substring(pos + 1); pos = request.indexOf("/"); destination = request.substring(0, pos); line = method + " " + request.substring(pos); } boolean isValid = usingWWWProxy || isSupportedAddress(host, protocol); if (!isValid) { if (_log.shouldLog(Log.INFO)) _log.info(getPrefix(requestId) + "notValid(" + host + ")"); method = null; destination = null; break; } else if (!usingWWWProxy) { if (_log.shouldLog(Log.INFO)) _log.info(getPrefix(requestId) + "host=getHostName(" + destination + ")"); host = getHostName(destination); // hide original host } if (_log.shouldLog(Log.DEBUG)) { _log.debug(getPrefix(requestId) + "METHOD:" + method + ":"); _log.debug(getPrefix(requestId) + "PROTOC:" + protocol + ":"); _log.debug(getPrefix(requestId) + "HOST :" + host + ":"); _log.debug(getPrefix(requestId) + "DEST :" + destination + ":"); } } else { if (line.startsWith("Host: ") && !usingWWWProxy) { line = "Host: " + host; if (_log.shouldLog(Log.INFO)) _log.info(getPrefix(requestId) + "Setting host = " + host); } } if (line.length() == 0) { newRequest.append("Connection: close\r\n\r\n"); break; } else { newRequest.append(line).append("\r\n"); // HTTP spec } } if (_log.shouldLog(Log.DEBUG)) _log.debug(getPrefix(requestId) + "NewRequest header: [" + newRequest.toString() + "]"); while (br.ready()) { // empty the buffer (POST requests) int i = br.read(); if (i != -1) { newRequest.append((char) i); } } if (method == null || destination == null) { l.log("No HTTP method found in the request."); if (out != null) { out.write(ERR_REQUEST_DENIED); out.write("<p /><i>Generated on: ".getBytes()); out.write(new Date().toString().getBytes()); out.write("</i></body></html>\n".getBytes()); out.flush(); } s.close(); return; } if (_log.shouldLog(Log.DEBUG)) _log.debug(getPrefix(requestId) + "Destination: " + destination); Destination dest = I2PTunnel.destFromName(destination); if (dest == null) { l.log("Could not resolve " + destination + "."); if 
(_log.shouldLog(Log.WARN)) _log.warn("Unable to resolve " + destination + " (proxy? " + usingWWWProxy + ", request: " + targetRequest); writeErrorMessage(ERR_DESTINATION_UNKNOWN, out, targetRequest, usingWWWProxy, destination); s.close(); return; } String remoteID; I2PSocket i2ps = createI2PSocket(dest); byte[] data = newRequest.toString().getBytes("ISO-8859-1"); I2PTunnelRunner runner = new I2PTunnelRunner(s, i2ps, sockLock, data); timeoutThread = new InactivityTimeoutThread(runner, out, targetRequest, usingWWWProxy, currentProxy, s, requestId); timeoutThread.start(); } catch (SocketException ex) { if (timeoutThread != null) timeoutThread.disable(); _log.info(getPrefix(requestId) + "Error trying to connect", ex); l.log(ex.getMessage()); handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy, requestId); closeSocket(s); } catch (IOException ex) { if (timeoutThread != null) timeoutThread.disable(); _log.info(getPrefix(requestId) + "Error trying to connect", ex); l.log(ex.getMessage()); handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy, requestId); closeSocket(s); } catch (I2PException ex) { if (timeoutThread != null) timeoutThread.disable(); _log.info("getPrefix(requestId) + Error trying to connect", ex); l.log(ex.getMessage()); handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy, requestId); closeSocket(s); } } private static final long INACTIVITY_TIMEOUT = 120 * 1000; private static volatile long __timeoutId = 0; private class InactivityTimeoutThread extends I2PThread { private Socket s; private I2PTunnelRunner _runner; private OutputStream _out; private String _targetRequest; private boolean _useWWWProxy; private String _currentProxy; private long _requestId; private boolean _disabled; private Object _disableLock = new Object(); public InactivityTimeoutThread(I2PTunnelRunner runner, OutputStream out, String targetRequest, boolean useWWWProxy, String currentProxy, Socket s, long 
requestId) { this.s = s; _runner = runner; _out = out; _targetRequest = targetRequest; _useWWWProxy = useWWWProxy; _currentProxy = currentProxy; _disabled = false; _requestId = requestId; long timeoutId = ++__timeoutId; setName("InactivityThread " + getPrefix(requestId) + timeoutId); } public void disable() { _disabled = true; synchronized (_disableLock) { _disableLock.notifyAll(); } } public void run() { while (!_disabled) { if (_runner.isFinished()) { if (_log.shouldLog(Log.INFO)) _log.info(getPrefix(_requestId) + "HTTP client request completed prior to timeout"); return; } if (_runner.getLastActivityOn() < Clock.getInstance().now() - INACTIVITY_TIMEOUT) { if (_runner.getStartedOn() < Clock.getInstance().now() - INACTIVITY_TIMEOUT) { if (_log.shouldLog(Log.WARN)) _log.warn(getPrefix(_requestId) + "HTTP client request timed out (lastActivity: " + new Date(_runner.getLastActivityOn()) + ", startedOn: " + new Date(_runner.getStartedOn()) + ")"); timeout(); return; } else { // runner hasn't been going to long enough } } else { // there has been activity in the period } synchronized (_disableLock) { try { _disableLock.wait(INACTIVITY_TIMEOUT); } catch (InterruptedException ie) { } } } } private void timeout() { _log.info(getPrefix(_requestId) + "Inactivity timeout reached"); l.log("Inactivity timeout reached"); if (_out != null) { try { if (_runner.getLastActivityOn() > 0) { // some data has been sent, so don't 404 it } else { writeErrorMessage(ERR_TIMEOUT, _out, _targetRequest, _useWWWProxy, _currentProxy); } } catch (IOException ioe) { _log.warn(getPrefix(_requestId) + "Error writing out the 'timeout' message", ioe); } } else { _log.warn(getPrefix(_requestId) + "Client disconnected before we could say we timed out"); } closeSocket(s); } } private final static String getHostName(String host) { if (host == null) return null; try { Destination dest = I2PTunnel.destFromName(host); if (dest == null) return "i2p"; return dest.toBase64(); } catch (DataFormatException dfe) 
{ return "i2p"; } } private static void writeErrorMessage(byte[] errMessage, OutputStream out, String targetRequest, boolean usingWWWProxy, String wwwProxy) throws IOException { if (out != null) { out.write(errMessage); if (targetRequest != null) { out.write(targetRequest.getBytes()); if (usingWWWProxy) out.write(("<br>WWW proxy: " + wwwProxy).getBytes()); } out.write("<p /><i>Generated on: ".getBytes()); out.write(new Date().toString().getBytes()); out.write("</i></body></html>\n".getBytes()); out.flush(); } } private void handleHTTPClientException(Exception ex, OutputStream out, String targetRequest, boolean usingWWWProxy, String wwwProxy, long requestId) { if (_log.shouldLog(Log.WARN)) _log.warn(getPrefix(requestId) + "Error sending to " + wwwProxy + " (proxy? " + usingWWWProxy + ", request: " + targetRequest, ex); if (out != null) { try { writeErrorMessage(ERR_DESTINATION_UNKNOWN, out, targetRequest, usingWWWProxy, wwwProxy); } catch (IOException ioe) { _log.warn(getPrefix(requestId) + "Error writing out the 'destination was unknown' " + "message", ioe); } } else { _log.warn(getPrefix(requestId) + "Client disconnected before we could say that destination " + "was unknown", ex); } } private final static String SUPPORTED_HOSTS[] = { "i2p", "www.i2p.com", "i2p."}; private boolean isSupportedAddress(String host, String protocol) { if ((host == null) || (protocol == null)) return false; boolean found = false; String lcHost = host.toLowerCase(); for (int i = 0; i < SUPPORTED_HOSTS.length; i++) { if (SUPPORTED_HOSTS[i].equals(lcHost)) { found = true; break; } } if (!found) { try { Destination d = I2PTunnel.destFromName(host); if (d == null) return false; } catch (DataFormatException dfe) { } } return protocol.equalsIgnoreCase("http://"); } }
apps/i2ptunnel/java/src/net/i2p/i2ptunnel/I2PTunnelHTTPClient.java
/* I2PTunnel is GPL'ed (with the exception mentioned in I2PTunnel.java) * (c) 2003 - 2004 mihi */ package net.i2p.i2ptunnel; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.Socket; import java.net.SocketException; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.StringTokenizer; import net.i2p.I2PAppContext; import net.i2p.I2PException; import net.i2p.client.streaming.I2PSocket; import net.i2p.data.DataFormatException; import net.i2p.data.Destination; import net.i2p.util.Clock; import net.i2p.util.EventDispatcher; import net.i2p.util.I2PThread; import net.i2p.util.Log; /** * Act as a mini HTTP proxy, handling various different types of requests, * forwarding them through I2P appropriately, and displaying the reply. Supported * request formats are: <pre> * $method http://$site[$port]/$path $protocolVersion * or * $method $path $protocolVersion\nHost: $site * or * $method http://i2p/$site/$path $protocolVersion * or * $method /$site/$path $protocolVersion * </pre> * * If the $site resolves with the I2P naming service, then it is directed towards * that eepsite, otherwise it is directed towards this client's outproxy (typically * "squid.i2p"). Only HTTP is supported (no HTTPS, ftp, mailto, etc). Both GET * and POST have been tested, though other $methods should work. 
* */ public class I2PTunnelHTTPClient extends I2PTunnelClientBase implements Runnable { private static final Log _log = new Log(I2PTunnelHTTPClient.class); private List proxyList; private final static byte[] ERR_REQUEST_DENIED = ("HTTP/1.1 403 Access Denied\r\n"+ "Content-Type: text/html; charset=iso-8859-1\r\n"+ "Cache-control: no-cache\r\n"+ "\r\n"+ "<html><body><H1>I2P ERROR: REQUEST DENIED</H1>"+ "You attempted to connect to a non-I2P website or location.<BR>") .getBytes(); private final static byte[] ERR_DESTINATION_UNKNOWN = ("HTTP/1.1 503 Service Unavailable\r\n"+ "Content-Type: text/html; charset=iso-8859-1\r\n"+ "Cache-control: no-cache\r\n"+ "\r\n"+ "<html><body><H1>I2P ERROR: DESTINATION NOT FOUND</H1>"+ "That I2P Destination was not found. Perhaps you pasted in the "+ "wrong BASE64 I2P Destination or the link you are following is "+ "bad. The host (or the WWW proxy, if you're using one) could also "+ "be temporarily offline. You may want to <b>retry</b>. "+ "Could not find the following Destination:<BR><BR>") .getBytes(); private final static byte[] ERR_TIMEOUT = ("HTTP/1.1 504 Gateway Timeout\r\n"+ "Content-Type: text/html; charset=iso-8859-1\r\n"+ "Cache-control: no-cache\r\n\r\n"+ "<html><body><H1>I2P ERROR: TIMEOUT</H1>"+ "That Destination was reachable, but timed out getting a "+ "response. This is likely a temporary error, so you should simply "+ "try to refresh, though if the problem persists, the remote "+ "destination may have issues. Could not get a response from "+ "the following Destination:<BR><BR>") .getBytes(); /** used to assign unique IDs to the threads / clients. 
no logic or functionality */ private static volatile long __clientId = 0; /** * @throws IllegalArgumentException if the I2PTunnel does not contain * valid config to contact the router */ public I2PTunnelHTTPClient(int localPort, Logging l, boolean ownDest, String wwwProxy, EventDispatcher notifyThis, I2PTunnel tunnel) throws IllegalArgumentException { super(localPort, ownDest, l, notifyThis, "HTTPHandler " + (++__clientId), tunnel); if (waitEventValue("openBaseClientResult").equals("error")) { notifyEvent("openHTTPClientResult", "error"); return; } proxyList = new ArrayList(); if (wwwProxy != null) { StringTokenizer tok = new StringTokenizer(wwwProxy, ","); while (tok.hasMoreTokens()) proxyList.add(tok.nextToken().trim()); } setName(getLocalPort() + " -> HTTPClient [WWW outproxy list: " + wwwProxy + "]"); startRunning(); notifyEvent("openHTTPClientResult", "ok"); } private String getPrefix() { return "Client[" + _clientId + "]: "; } private String selectProxy() { if (proxyList.size() <= 0) { l.log("Proxy list is emtpy - no outproxy available"); return null; } int index = I2PAppContext.getGlobalContext().random().nextInt(proxyList.size()); return (String)proxyList.get(index); } protected void clientConnectionRun(Socket s) { OutputStream out = null; String targetRequest = null; boolean usingWWWProxy = false; String currentProxy = null; InactivityTimeoutThread timeoutThread = null; try { out = s.getOutputStream(); BufferedReader br = new BufferedReader(new InputStreamReader(s.getInputStream(), "ISO-8859-1")); String line, method = null, protocol = null, host = null, destination = null; StringBuffer newRequest = new StringBuffer(); while ((line = br.readLine()) != null) { if (_log.shouldLog(Log.DEBUG)) _log.debug(getPrefix() + "Line=[" + line + "]"); if (line.startsWith("Connection: ") || line.startsWith("Keep-Alive: ") || line.startsWith("Proxy-Connection: ")) continue; if (method == null) { // first line (GET /base64/realaddr) if (_log.shouldLog(Log.DEBUG)) 
_log.debug(getPrefix() + "Method is null for [" + line + "]"); int pos = line.indexOf(" "); if (pos == -1) break; method = line.substring(0, pos); String request = line.substring(pos + 1); if (request.startsWith("/") && getTunnel().getClientOptions().getProperty("i2ptunnel.noproxy") != null) { request = "http://i2p" + request; } pos = request.indexOf("//"); if (pos == -1) { method = null; break; } protocol = request.substring(0, pos + 2); request = request.substring(pos + 2); targetRequest = request; pos = request.indexOf("/"); if (pos == -1) { method = null; break; } host = request.substring(0, pos); // Quick hack for foo.bar.i2p if (host.toLowerCase().endsWith(".i2p")) { destination = host; host = getHostName(destination); line = method + " " + request.substring(pos); } else if (host.indexOf(".") != -1) { // The request must be forwarded to a WWW proxy currentProxy = selectProxy(); destination = currentProxy; usingWWWProxy = true; if (_log.shouldLog(Log.DEBUG)) _log.debug(getPrefix() + "Host doesnt end with .i2p and it contains a period [" + host + "]: wwwProxy!"); } else { request = request.substring(pos + 1); pos = request.indexOf("/"); destination = request.substring(0, pos); line = method + " " + request.substring(pos); } boolean isValid = usingWWWProxy || isSupportedAddress(host, protocol); if (!isValid) { if (_log.shouldLog(Log.INFO)) _log.info(getPrefix() + "notValid(" + host + ")"); method = null; destination = null; break; } else if (!usingWWWProxy) { if (_log.shouldLog(Log.INFO)) _log.info(getPrefix() + "host=getHostName(" + destination + ")"); host = getHostName(destination); // hide original host } if (_log.shouldLog(Log.DEBUG)) { _log.debug(getPrefix() + "METHOD:" + method + ":"); _log.debug(getPrefix() + "PROTOC:" + protocol + ":"); _log.debug(getPrefix() + "HOST :" + host + ":"); _log.debug(getPrefix() + "DEST :" + destination + ":"); } } else { if (line.startsWith("Host: ") && !usingWWWProxy) { line = "Host: " + host; if (_log.shouldLog(Log.INFO)) 
_log.info(getPrefix() + "Setting host = " + host); } } if (line.length() == 0) { newRequest.append("Connection: close\r\n\r\n"); break; } else { newRequest.append(line).append("\r\n"); // HTTP spec } } if (_log.shouldLog(Log.DEBUG)) _log.debug(getPrefix() + "NewRequest header: [" + newRequest.toString() + "]"); while (br.ready()) { // empty the buffer (POST requests) int i = br.read(); if (i != -1) { newRequest.append((char) i); } } if (method == null || destination == null) { l.log("No HTTP method found in the request."); if (out != null) { out.write(ERR_REQUEST_DENIED); out.write("<p /><i>Generated on: ".getBytes()); out.write(new Date().toString().getBytes()); out.write("</i></body></html>\n".getBytes()); out.flush(); } s.close(); return; } if (_log.shouldLog(Log.DEBUG)) _log.debug(getPrefix() + "Destination: " + destination); Destination dest = I2PTunnel.destFromName(destination); if (dest == null) { l.log("Could not resolve " + destination + "."); if (_log.shouldLog(Log.WARN)) _log.warn("Unable to resolve " + destination + " (proxy? 
" + usingWWWProxy + ", request: " + targetRequest); writeErrorMessage(ERR_DESTINATION_UNKNOWN, out, targetRequest, usingWWWProxy, destination); s.close(); return; } String remoteID; I2PSocket i2ps = createI2PSocket(dest); byte[] data = newRequest.toString().getBytes("ISO-8859-1"); I2PTunnelRunner runner = new I2PTunnelRunner(s, i2ps, sockLock, data); timeoutThread = new InactivityTimeoutThread(runner, out, targetRequest, usingWWWProxy, currentProxy, s); timeoutThread.start(); } catch (SocketException ex) { if (timeoutThread != null) timeoutThread.disable(); _log.info(getPrefix() + "Error trying to connect", ex); l.log(ex.getMessage()); handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy); closeSocket(s); } catch (IOException ex) { if (timeoutThread != null) timeoutThread.disable(); _log.info(getPrefix() + "Error trying to connect", ex); l.log(ex.getMessage()); handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy); closeSocket(s); } catch (I2PException ex) { if (timeoutThread != null) timeoutThread.disable(); _log.info("getPrefix() + Error trying to connect", ex); l.log(ex.getMessage()); handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy); closeSocket(s); } } private static final long INACTIVITY_TIMEOUT = 120 * 1000; private static volatile long __timeoutId = 0; private class InactivityTimeoutThread extends I2PThread { private Socket s; private I2PTunnelRunner _runner; private OutputStream _out; private String _targetRequest; private boolean _useWWWProxy; private String _currentProxy; private boolean _disabled; private Object _disableLock = new Object(); public InactivityTimeoutThread(I2PTunnelRunner runner, OutputStream out, String targetRequest, boolean useWWWProxy, String currentProxy, Socket s) { this.s = s; _runner = runner; _out = out; _targetRequest = targetRequest; _useWWWProxy = useWWWProxy; _currentProxy = currentProxy; _disabled = false; long timeoutId = ++__timeoutId; 
setName("InactivityThread " + getPrefix() + timeoutId); } public void disable() { _disabled = true; synchronized (_disableLock) { _disableLock.notifyAll(); } } public void run() { while (!_disabled) { if (_runner.isFinished()) { if (_log.shouldLog(Log.INFO)) _log.info(getPrefix() + "HTTP client request completed prior to timeout"); return; } if (_runner.getLastActivityOn() < Clock.getInstance().now() - INACTIVITY_TIMEOUT) { if (_runner.getStartedOn() < Clock.getInstance().now() - INACTIVITY_TIMEOUT) { if (_log.shouldLog(Log.WARN)) _log.warn(getPrefix() + "HTTP client request timed out (lastActivity: " + new Date(_runner.getLastActivityOn()) + ", startedOn: " + new Date(_runner.getStartedOn()) + ")"); timeout(); return; } else { // runner hasn't been going to long enough } } else { // there has been activity in the period } synchronized (_disableLock) { try { _disableLock.wait(INACTIVITY_TIMEOUT); } catch (InterruptedException ie) { } } } } private void timeout() { _log.info(getPrefix() + "Inactivity timeout reached"); l.log("Inactivity timeout reached"); if (_out != null) { try { if (_runner.getLastActivityOn() > 0) { // some data has been sent, so don't 404 it } else { writeErrorMessage(ERR_TIMEOUT, _out, _targetRequest, _useWWWProxy, _currentProxy); } } catch (IOException ioe) { _log.warn(getPrefix() + "Error writing out the 'timeout' message", ioe); } } else { _log.warn(getPrefix() + "Client disconnected before we could say we timed out"); } closeSocket(s); } } private final static String getHostName(String host) { try { Destination dest = I2PTunnel.destFromName(host); if (dest == null) return "i2p"; return dest.toBase64(); } catch (DataFormatException dfe) { return "i2p"; } } private static void writeErrorMessage(byte[] errMessage, OutputStream out, String targetRequest, boolean usingWWWProxy, String wwwProxy) throws IOException { if (out != null) { out.write(errMessage); if (targetRequest != null) { out.write(targetRequest.getBytes()); if (usingWWWProxy) 
out.write(("<br>WWW proxy: " + wwwProxy).getBytes()); } out.write("<p /><i>Generated on: ".getBytes()); out.write(new Date().toString().getBytes()); out.write("</i></body></html>\n".getBytes()); out.flush(); } } private void handleHTTPClientException(Exception ex, OutputStream out, String targetRequest, boolean usingWWWProxy, String wwwProxy) { if (_log.shouldLog(Log.WARN)) _log.warn("Error sending to " + wwwProxy + " (proxy? " + usingWWWProxy + ", request: " + targetRequest, ex); if (out != null) { try { writeErrorMessage(ERR_DESTINATION_UNKNOWN, out, targetRequest, usingWWWProxy, wwwProxy); } catch (IOException ioe) { _log.warn(getPrefix() + "Error writing out the 'destination was unknown' " + "message", ioe); } } else { _log.warn(getPrefix() + "Client disconnected before we could say that destination " + "was unknown", ex); } } private final static String SUPPORTED_HOSTS[] = { "i2p", "www.i2p.com", "i2p."}; private boolean isSupportedAddress(String host, String protocol) { if ((host == null) || (protocol == null)) return false; boolean found = false; String lcHost = host.toLowerCase(); for (int i = 0; i < SUPPORTED_HOSTS.length; i++) { if (SUPPORTED_HOSTS[i].equals(lcHost)) { found = true; break; } } if (!found) { try { Destination d = I2PTunnel.destFromName(host); if (d == null) return false; } catch (DataFormatException dfe) { } } return protocol.equalsIgnoreCase("http://"); } }
deal with no proxy available more carefully retrieve a proxy logging (w/ unique requestId)
apps/i2ptunnel/java/src/net/i2p/i2ptunnel/I2PTunnelHTTPClient.java
deal with no proxy available more carefully retrieve a proxy logging (w/ unique requestId)
<ide><path>pps/i2ptunnel/java/src/net/i2p/i2ptunnel/I2PTunnelHTTPClient.java <ide> "the following Destination:<BR><BR>") <ide> .getBytes(); <ide> <add> private final static byte[] ERR_NO_OUTPROXY = <add> ("HTTP/1.1 503 Service Unavailable\r\n"+ <add> "Content-Type: text/html; charset=iso-8859-1\r\n"+ <add> "Cache-control: no-cache\r\n"+ <add> "\r\n"+ <add> "<html><body><H1>I2P ERROR: No outproxy found</H1>"+ <add> "Your request was for a site outside of I2P, but you have no "+ <add> "HTTP outproxy configured. Please configure an outproxy in I2PTunnel") <add> .getBytes(); <add> <ide> /** used to assign unique IDs to the threads / clients. no logic or functionality */ <ide> private static volatile long __clientId = 0; <ide> <ide> notifyEvent("openHTTPClientResult", "ok"); <ide> } <ide> <del> private String getPrefix() { return "Client[" + _clientId + "]: "; } <add> private String getPrefix(long requestId) { return "Client[" + _clientId + "/" + requestId + "]: "; } <ide> <ide> private String selectProxy() { <del> if (proxyList.size() <= 0) { <del> l.log("Proxy list is emtpy - no outproxy available"); <del> return null; <del> } <del> int index = I2PAppContext.getGlobalContext().random().nextInt(proxyList.size()); <del> return (String)proxyList.get(index); <add> synchronized (proxyList) { <add> int size = proxyList.size(); <add> if (size <= 0) { <add> if (_log.shouldLog(Log.INFO)) <add> _log.info("Proxy list is empty - no outproxy available"); <add> l.log("Proxy list is emtpy - no outproxy available"); <add> return null; <add> } <add> int index = I2PAppContext.getGlobalContext().random().nextInt(size); <add> if (index >= size) index = size - 1; <add> if (index < 0) return null; <add> String proxy = (String)proxyList.get(index); <add> return proxy; <add> } <ide> } <ide> <add> private static long __requestId = 0; <ide> protected void clientConnectionRun(Socket s) { <ide> OutputStream out = null; <ide> String targetRequest = null; <ide> boolean usingWWWProxy = false; <ide> 
String currentProxy = null; <ide> InactivityTimeoutThread timeoutThread = null; <add> long requestId = ++__requestId; <ide> try { <ide> out = s.getOutputStream(); <ide> BufferedReader br = new BufferedReader(new InputStreamReader(s.getInputStream(), "ISO-8859-1")); <ide> StringBuffer newRequest = new StringBuffer(); <ide> while ((line = br.readLine()) != null) { <ide> if (_log.shouldLog(Log.DEBUG)) <del> _log.debug(getPrefix() + "Line=[" + line + "]"); <add> _log.debug(getPrefix(requestId) + "Line=[" + line + "]"); <ide> <ide> if (line.startsWith("Connection: ") || <ide> line.startsWith("Keep-Alive: ") || <ide> <ide> if (method == null) { // first line (GET /base64/realaddr) <ide> if (_log.shouldLog(Log.DEBUG)) <del> _log.debug(getPrefix() + "Method is null for [" + line + "]"); <add> _log.debug(getPrefix(requestId) + "Method is null for [" + line + "]"); <ide> <ide> int pos = line.indexOf(" "); <ide> if (pos == -1) break; <ide> line = method + " " + request.substring(pos); <ide> } else if (host.indexOf(".") != -1) { <ide> // The request must be forwarded to a WWW proxy <add> if (_log.shouldLog(Log.DEBUG)) <add> _log.debug("Before selecting outproxy for " + host); <ide> currentProxy = selectProxy(); <add> if (_log.shouldLog(Log.DEBUG)) <add> _log.debug("After selecting outproxy for " + host + ": " + currentProxy); <add> if (currentProxy == null) { <add> if (_log.shouldLog(Log.WARN)) <add> _log.warn(getPrefix(requestId) + "Host wants to be outproxied, but we dont have any!"); <add> l.log("No HTTP outproxy found for the request."); <add> if (out != null) { <add> out.write(ERR_NO_OUTPROXY); <add> out.write("<p /><i>Generated on: ".getBytes()); <add> out.write(new Date().toString().getBytes()); <add> out.write("</i></body></html>\n".getBytes()); <add> out.flush(); <add> } <add> s.close(); <add> return; <add> } <ide> destination = currentProxy; <ide> usingWWWProxy = true; <ide> if (_log.shouldLog(Log.DEBUG)) <del> _log.debug(getPrefix() + "Host doesnt end with .i2p and 
it contains a period [" + host + "]: wwwProxy!"); <add> _log.debug(getPrefix(requestId) + "Host doesnt end with .i2p and it contains a period [" + host + "]: wwwProxy!"); <ide> } else { <ide> request = request.substring(pos + 1); <ide> pos = request.indexOf("/"); <ide> <ide> boolean isValid = usingWWWProxy || isSupportedAddress(host, protocol); <ide> if (!isValid) { <del> if (_log.shouldLog(Log.INFO)) _log.info(getPrefix() + "notValid(" + host + ")"); <add> if (_log.shouldLog(Log.INFO)) _log.info(getPrefix(requestId) + "notValid(" + host + ")"); <ide> method = null; <ide> destination = null; <ide> break; <ide> } else if (!usingWWWProxy) { <del> if (_log.shouldLog(Log.INFO)) _log.info(getPrefix() + "host=getHostName(" + destination + ")"); <add> if (_log.shouldLog(Log.INFO)) _log.info(getPrefix(requestId) + "host=getHostName(" + destination + ")"); <ide> host = getHostName(destination); // hide original host <ide> } <ide> <ide> if (_log.shouldLog(Log.DEBUG)) { <del> _log.debug(getPrefix() + "METHOD:" + method + ":"); <del> _log.debug(getPrefix() + "PROTOC:" + protocol + ":"); <del> _log.debug(getPrefix() + "HOST :" + host + ":"); <del> _log.debug(getPrefix() + "DEST :" + destination + ":"); <add> _log.debug(getPrefix(requestId) + "METHOD:" + method + ":"); <add> _log.debug(getPrefix(requestId) + "PROTOC:" + protocol + ":"); <add> _log.debug(getPrefix(requestId) + "HOST :" + host + ":"); <add> _log.debug(getPrefix(requestId) + "DEST :" + destination + ":"); <ide> } <ide> <ide> } else { <ide> if (line.startsWith("Host: ") && !usingWWWProxy) { <ide> line = "Host: " + host; <ide> if (_log.shouldLog(Log.INFO)) <del> _log.info(getPrefix() + "Setting host = " + host); <add> _log.info(getPrefix(requestId) + "Setting host = " + host); <ide> } <ide> } <ide> <ide> } <ide> } <ide> if (_log.shouldLog(Log.DEBUG)) <del> _log.debug(getPrefix() + "NewRequest header: [" + newRequest.toString() + "]"); <add> _log.debug(getPrefix(requestId) + "NewRequest header: [" + 
newRequest.toString() + "]"); <ide> <ide> while (br.ready()) { // empty the buffer (POST requests) <ide> int i = br.read(); <ide> } <ide> <ide> if (_log.shouldLog(Log.DEBUG)) <del> _log.debug(getPrefix() + "Destination: " + destination); <add> _log.debug(getPrefix(requestId) + "Destination: " + destination); <ide> <ide> Destination dest = I2PTunnel.destFromName(destination); <ide> if (dest == null) { <ide> I2PSocket i2ps = createI2PSocket(dest); <ide> byte[] data = newRequest.toString().getBytes("ISO-8859-1"); <ide> I2PTunnelRunner runner = new I2PTunnelRunner(s, i2ps, sockLock, data); <del> timeoutThread = new InactivityTimeoutThread(runner, out, targetRequest, usingWWWProxy, currentProxy, s); <add> timeoutThread = new InactivityTimeoutThread(runner, out, targetRequest, usingWWWProxy, currentProxy, s, requestId); <ide> timeoutThread.start(); <ide> } catch (SocketException ex) { <ide> if (timeoutThread != null) timeoutThread.disable(); <del> _log.info(getPrefix() + "Error trying to connect", ex); <add> _log.info(getPrefix(requestId) + "Error trying to connect", ex); <ide> l.log(ex.getMessage()); <del> handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy); <add> handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy, requestId); <ide> closeSocket(s); <ide> } catch (IOException ex) { <ide> if (timeoutThread != null) timeoutThread.disable(); <del> _log.info(getPrefix() + "Error trying to connect", ex); <add> _log.info(getPrefix(requestId) + "Error trying to connect", ex); <ide> l.log(ex.getMessage()); <del> handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy); <add> handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy, requestId); <ide> closeSocket(s); <ide> } catch (I2PException ex) { <ide> if (timeoutThread != null) timeoutThread.disable(); <del> _log.info("getPrefix() + Error trying to connect", ex); <add> _log.info("getPrefix(requestId) + Error trying to connect", 
ex); <ide> l.log(ex.getMessage()); <del> handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy); <add> handleHTTPClientException(ex, out, targetRequest, usingWWWProxy, currentProxy, requestId); <ide> closeSocket(s); <ide> } <ide> } <ide> private static volatile long __timeoutId = 0; <ide> <ide> private class InactivityTimeoutThread extends I2PThread { <del> <add> <ide> private Socket s; <ide> private I2PTunnelRunner _runner; <ide> private OutputStream _out; <ide> private String _targetRequest; <ide> private boolean _useWWWProxy; <ide> private String _currentProxy; <add> private long _requestId; <ide> private boolean _disabled; <ide> private Object _disableLock = new Object(); <ide> <ide> public InactivityTimeoutThread(I2PTunnelRunner runner, OutputStream out, String targetRequest, <del> boolean useWWWProxy, String currentProxy, Socket s) { <add> boolean useWWWProxy, String currentProxy, Socket s, long requestId) { <ide> this.s = s; <ide> _runner = runner; <ide> _out = out; <ide> _useWWWProxy = useWWWProxy; <ide> _currentProxy = currentProxy; <ide> _disabled = false; <add> _requestId = requestId; <ide> long timeoutId = ++__timeoutId; <del> setName("InactivityThread " + getPrefix() + timeoutId); <add> setName("InactivityThread " + getPrefix(requestId) + timeoutId); <ide> } <ide> <ide> public void disable() { <ide> public void run() { <ide> while (!_disabled) { <ide> if (_runner.isFinished()) { <del> if (_log.shouldLog(Log.INFO)) _log.info(getPrefix() + "HTTP client request completed prior to timeout"); <add> if (_log.shouldLog(Log.INFO)) _log.info(getPrefix(_requestId) + "HTTP client request completed prior to timeout"); <ide> return; <ide> } <ide> if (_runner.getLastActivityOn() < Clock.getInstance().now() - INACTIVITY_TIMEOUT) { <ide> if (_runner.getStartedOn() < Clock.getInstance().now() - INACTIVITY_TIMEOUT) { <ide> if (_log.shouldLog(Log.WARN)) <del> _log.warn(getPrefix() + "HTTP client request timed out (lastActivity: " <add> 
_log.warn(getPrefix(_requestId) + "HTTP client request timed out (lastActivity: " <ide> + new Date(_runner.getLastActivityOn()) + ", startedOn: " <ide> + new Date(_runner.getStartedOn()) + ")"); <ide> timeout(); <ide> } <ide> <ide> private void timeout() { <del> _log.info(getPrefix() + "Inactivity timeout reached"); <add> _log.info(getPrefix(_requestId) + "Inactivity timeout reached"); <ide> l.log("Inactivity timeout reached"); <ide> if (_out != null) { <ide> try { <ide> writeErrorMessage(ERR_TIMEOUT, _out, _targetRequest, _useWWWProxy, _currentProxy); <ide> } <ide> } catch (IOException ioe) { <del> _log.warn(getPrefix() + "Error writing out the 'timeout' message", ioe); <add> _log.warn(getPrefix(_requestId) + "Error writing out the 'timeout' message", ioe); <ide> } <ide> } else { <del> _log.warn(getPrefix() + "Client disconnected before we could say we timed out"); <add> _log.warn(getPrefix(_requestId) + "Client disconnected before we could say we timed out"); <ide> } <ide> closeSocket(s); <ide> } <ide> } <ide> <ide> private final static String getHostName(String host) { <add> if (host == null) return null; <ide> try { <ide> Destination dest = I2PTunnel.destFromName(host); <ide> if (dest == null) return "i2p"; <ide> } <ide> <ide> private void handleHTTPClientException(Exception ex, OutputStream out, String targetRequest, <del> boolean usingWWWProxy, String wwwProxy) { <add> boolean usingWWWProxy, String wwwProxy, long requestId) { <ide> <ide> if (_log.shouldLog(Log.WARN)) <del> _log.warn("Error sending to " + wwwProxy + " (proxy? " + usingWWWProxy + ", request: " + targetRequest, ex); <add> _log.warn(getPrefix(requestId) + "Error sending to " + wwwProxy + " (proxy? 
" + usingWWWProxy + ", request: " + targetRequest, ex); <ide> if (out != null) { <ide> try { <ide> writeErrorMessage(ERR_DESTINATION_UNKNOWN, out, targetRequest, usingWWWProxy, wwwProxy); <ide> } catch (IOException ioe) { <del> _log.warn(getPrefix() + "Error writing out the 'destination was unknown' " + "message", ioe); <add> _log.warn(getPrefix(requestId) + "Error writing out the 'destination was unknown' " + "message", ioe); <ide> } <ide> } else { <del> _log.warn(getPrefix() + "Client disconnected before we could say that destination " + "was unknown", ex); <add> _log.warn(getPrefix(requestId) + "Client disconnected before we could say that destination " + "was unknown", ex); <ide> } <ide> } <ide>
Java
mit
8a5e811c7fbc3eb660e8705cab306d317a379326
0
MightyPirates/OC-LuaJ,MightyPirates/OC-LuaJ
package org.luaj.vm2; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import junit.framework.TestCase; import org.luaj.vm2.lib.jse.JsePlatform; public class StringTest extends TestCase { protected void setUp() throws Exception { JsePlatform.standardGlobals(); } public void testToInputStream() throws IOException { LuaString str = LuaString.valueOf("Hello"); InputStream is = str.toInputStream(); assertEquals( 'H', is.read() ); assertEquals( 'e', is.read() ); assertEquals( 2, is.skip( 2 ) ); assertEquals( 'o', is.read() ); assertEquals( -1, is.read() ); assertTrue( is.markSupported() ); is.reset(); assertEquals( 'H', is.read() ); is.mark( 4 ); assertEquals( 'e', is.read() ); is.reset(); assertEquals( 'e', is.read() ); LuaString substr = str.substring( 1, 4 ); assertEquals( 3, substr.length() ); is.close(); is = substr.toInputStream(); assertEquals( 'e', is.read() ); assertEquals( 'l', is.read() ); assertEquals( 'l', is.read() ); assertEquals( -1, is.read() ); is = substr.toInputStream(); is.reset(); assertEquals( 'e', is.read() ); } private static final String userFriendly( String s ) { StringBuffer sb = new StringBuffer(); for ( int i=0, n=s.length(); i<n; i++ ) { int c = s.charAt(i); if ( c < ' ' || c >= 0x80 ) { sb.append( "\\u"+Integer.toHexString(0x10000+c).substring(1) ); } else { sb.append( (char) c ); } } return sb.toString(); } public void testUtf820482051() throws UnsupportedEncodingException { int i = 2048; char[] c = { (char) (i+0), (char) (i+1), (char) (i+2), (char) (i+3) }; String before = new String(c)+" "+i+"-"+(i+4); LuaString ls = LuaString.valueOf(before); String after = ls.tojstring(); assertEquals( userFriendly( before ), userFriendly( after ) ); } public void testUtf8() { for ( int i=4; i<0xffff; i+=4 ) { char[] c = { (char) (i+0), (char) (i+1), (char) (i+2), (char) (i+3) }; String before = new String(c)+" "+i+"-"+(i+4); LuaString ls = LuaString.valueOf(before); String after = ls.tojstring(); 
assertEquals( userFriendly( before ), userFriendly( after ) ); } char[] c = { (char) (1), (char) (2), (char) (3) }; String before = new String(c)+" 1-3"; LuaString ls = LuaString.valueOf(before); String after = ls.tojstring(); assertEquals( userFriendly( before ), userFriendly( after ) ); } public void testSpotCheckUtf8() throws UnsupportedEncodingException { byte[] bytes = {(byte)194,(byte)160,(byte)194,(byte)161,(byte)194,(byte)162,(byte)194,(byte)163,(byte)194,(byte)164}; String expected = new String(bytes, "UTF8"); String actual = LuaString.valueOf(bytes).tojstring(); char[] d = actual.toCharArray(); assertEquals(160, d[0]); assertEquals(161, d[1]); assertEquals(162, d[2]); assertEquals(163, d[3]); assertEquals(164, d[4]); assertEquals(expected, actual); } public void testNullTerminated() { char[] c = { 'a', 'b', 'c', '\0', 'd', 'e', 'f' }; String before = new String(c); LuaString ls = LuaString.valueOf(before); String after = ls.tojstring(); assertEquals( userFriendly( "abc\0def" ), userFriendly( after ) ); } public void testRecentStringsCacheDifferentHashcodes() { final byte[] abc = {'a', 'b', 'c' }; final byte[] xyz = {'x', 'y', 'z' }; final LuaString abc1 = LuaString.valueOf(abc); final LuaString xyz1 = LuaString.valueOf(xyz); final LuaString abc2 = LuaString.valueOf(abc); final LuaString xyz2 = LuaString.valueOf(xyz); final int mod = LuaString.RECENT_STRINGS_CACHE_SIZE; assertTrue(abc1.hashCode() % mod != xyz1.hashCode() % mod); assertSame(abc1, abc2); assertSame(xyz1, xyz2); } public void testRecentStringsCacheHashCollisionCacheHit() { final byte[] abc = {'a', 'b', 'c' }; final byte[] lyz = {'l', 'y', 'z' }; // chosen to have hash collision with 'abc' final LuaString abc1 = LuaString.valueOf(abc); final LuaString abc2 = LuaString.valueOf(abc); // in cache: 'abc' final LuaString lyz1 = LuaString.valueOf(lyz); final LuaString lyz2 = LuaString.valueOf(lyz); // in cache: 'lyz' final int mod = LuaString.RECENT_STRINGS_CACHE_SIZE; assertEquals(abc1.hashCode() % 
mod, lyz1.hashCode() % mod); assertNotSame(abc1, lyz1); assertFalse(abc1.equals(lyz1)); assertSame(abc1, abc2); assertSame(lyz1, lyz2); } public void testRecentStringsCacheHashCollisionCacheMiss() { final byte[] abc = {'a', 'b', 'c' }; final byte[] lyz = {'l', 'y', 'z' }; // chosen to have hash collision with 'abc' final LuaString abc1 = LuaString.valueOf(abc); final LuaString lyz1 = LuaString.valueOf(lyz); // in cache: 'abc' final LuaString abc2 = LuaString.valueOf(abc); // in cache: 'lyz' final LuaString lyz2 = LuaString.valueOf(lyz); // in cache: 'abc' final int mod = LuaString.RECENT_STRINGS_CACHE_SIZE; assertEquals(abc1.hashCode() % mod, lyz1.hashCode() % mod); assertNotSame(abc1, lyz1); assertFalse(abc1.equals(lyz1)); assertNotSame(abc1, abc2); assertNotSame(lyz1, lyz2); } public void testRecentStringsLongStrings() { byte[] abc = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ".getBytes(); assertTrue(abc.length > LuaString.RECENT_STRINGS_MAX_LENGTH); LuaString abc1 = LuaString.valueOf(abc); LuaString abc2 = LuaString.valueOf(abc); assertNotSame(abc1, abc2); } public void testRecentStringsUsingJavaStrings() { final String abc = "abc"; final String lyz = "lyz"; // chosen to have hash collision with 'abc' final String xyz = "xyz"; final LuaString abc1 = LuaString.valueOf(abc); final LuaString abc2 = LuaString.valueOf(abc); final LuaString lyz1 = LuaString.valueOf(lyz); final LuaString lyz2 = LuaString.valueOf(lyz); final LuaString xyz1 = LuaString.valueOf(xyz); final LuaString xyz2 = LuaString.valueOf(xyz); final int mod = LuaString.RECENT_STRINGS_CACHE_SIZE; assertEquals(abc1.hashCode() % mod, lyz1.hashCode() % mod); assertFalse(abc1.hashCode() % mod == xyz1.hashCode() % mod); assertSame(abc1, abc2); assertSame(lyz1, lyz2); assertSame(xyz1, xyz2); final LuaString abc3 = LuaString.valueOf(abc); final LuaString lyz3 = LuaString.valueOf(lyz); final LuaString xyz3 = LuaString.valueOf(xyz); final LuaString abc4 = LuaString.valueOf(abc); final LuaString lyz4 = 
LuaString.valueOf(lyz); final LuaString xyz4 = LuaString.valueOf(xyz); assertNotSame(abc3, abc4); // because of hash collision assertNotSame(lyz3, lyz4); // because of hash collision assertSame(xyz3, xyz4); // because hashes do not collide } public void testLongSubstringGetsOldBacking() { LuaString src = LuaString.valueOf("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"); LuaString sub1 = src.substring(10, 40); assertSame(src.m_bytes, sub1.m_bytes); assertEquals(sub1.m_offset, 10); assertEquals(sub1.m_length, 30); } public void testShortSubstringGetsNewBacking() { LuaString src = LuaString.valueOf("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"); LuaString sub1 = src.substring(10, 20); LuaString sub2 = src.substring(10, 20); assertEquals(sub1.m_offset, 0); assertEquals(sub1.m_length, 10); assertSame(sub1, sub2); assertFalse(src.m_bytes == sub1.m_bytes); } public void testShortSubstringOfVeryLongStringGetsNewBacking() { LuaString src = LuaString.valueOf( "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" ); LuaString sub1 = src.substring(10, 50); LuaString sub2 = src.substring(10, 50); assertEquals(sub1.m_offset, 0); assertEquals(sub1.m_length, 40); assertFalse(sub1 == sub2); assertFalse(src.m_bytes == sub1.m_bytes); } }
test/junit/org/luaj/vm2/StringTest.java
package org.luaj.vm2; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import junit.framework.TestCase; import org.luaj.vm2.lib.jse.JsePlatform; public class StringTest extends TestCase { protected void setUp() throws Exception { JsePlatform.standardGlobals(); } public void testToInputStream() throws IOException { LuaString str = LuaString.valueOf("Hello"); InputStream is = str.toInputStream(); assertEquals( 'H', is.read() ); assertEquals( 'e', is.read() ); assertEquals( 2, is.skip( 2 ) ); assertEquals( 'o', is.read() ); assertEquals( -1, is.read() ); assertTrue( is.markSupported() ); is.reset(); assertEquals( 'H', is.read() ); is.mark( 4 ); assertEquals( 'e', is.read() ); is.reset(); assertEquals( 'e', is.read() ); LuaString substr = str.substring( 1, 4 ); assertEquals( 3, substr.length() ); is.close(); is = substr.toInputStream(); assertEquals( 'e', is.read() ); assertEquals( 'l', is.read() ); assertEquals( 'l', is.read() ); assertEquals( -1, is.read() ); is = substr.toInputStream(); is.reset(); assertEquals( 'e', is.read() ); } private static final String userFriendly( String s ) { StringBuffer sb = new StringBuffer(); for ( int i=0, n=s.length(); i<n; i++ ) { int c = s.charAt(i); if ( c < ' ' || c >= 0x80 ) { sb.append( "\\u"+Integer.toHexString(0x10000+c).substring(1) ); } else { sb.append( (char) c ); } } return sb.toString(); } public void testUtf820482051() throws UnsupportedEncodingException { int i = 2048; char[] c = { (char) (i+0), (char) (i+1), (char) (i+2), (char) (i+3) }; String before = new String(c)+" "+i+"-"+(i+4); LuaString ls = LuaString.valueOf(before); String after = ls.tojstring(); assertEquals( userFriendly( before ), userFriendly( after ) ); } public void testUtf8() { for ( int i=4; i<0xffff; i+=4 ) { char[] c = { (char) (i+0), (char) (i+1), (char) (i+2), (char) (i+3) }; String before = new String(c)+" "+i+"-"+(i+4); LuaString ls = LuaString.valueOf(before); String after = ls.tojstring(); 
assertEquals( userFriendly( before ), userFriendly( after ) ); } char[] c = { (char) (1), (char) (2), (char) (3) }; String before = new String(c)+" 1-3"; LuaString ls = LuaString.valueOf(before); String after = ls.tojstring(); assertEquals( userFriendly( before ), userFriendly( after ) ); } public void testSpotCheckUtf8() throws UnsupportedEncodingException { byte[] bytes = {(byte)194,(byte)160,(byte)194,(byte)161,(byte)194,(byte)162,(byte)194,(byte)163,(byte)194,(byte)164}; String expected = new String(bytes, "UTF8"); String actual = LuaString.valueOf(bytes).tojstring(); char[] d = actual.toCharArray(); assertEquals(160, d[0]); assertEquals(161, d[1]); assertEquals(162, d[2]); assertEquals(163, d[3]); assertEquals(164, d[4]); assertEquals(expected, actual); } public void testNullTerminated() { char[] c = { 'a', 'b', 'c', '\0', 'd', 'e', 'f' }; String before = new String(c); LuaString ls = LuaString.valueOf(before); String after = ls.tojstring(); assertEquals( userFriendly( "abc\0def" ), userFriendly( after ) ); } public void testRecentStringsCacheDifferentHashcodes() { final byte[] abc = {'a', 'b', 'c' }; final byte[] xyz = {'x', 'y', 'z' }; final LuaString abc1 = LuaString.valueOf(abc); final LuaString xyz1 = LuaString.valueOf(xyz); final LuaString abc2 = LuaString.valueOf(abc); final LuaString xyz2 = LuaString.valueOf(xyz); final int mod = LuaString.RECENT_STRINGS_CACHE_SIZE; assertTrue(abc1.hashCode() % mod != xyz1.hashCode() % mod); assertSame(abc1, abc2); assertSame(xyz1, xyz2); } public void testRecentStringsCacheHashCollisionCacheHit() { final byte[] abc = {'a', 'b', 'c' }; final byte[] lyz = {'l', 'y', 'z' }; // chosen to have hash collision with 'abc' final LuaString abc1 = LuaString.valueOf(abc); final LuaString abc2 = LuaString.valueOf(abc); // in cache: 'abc' final LuaString lyz1 = LuaString.valueOf(lyz); final LuaString lyz2 = LuaString.valueOf(lyz); // in cache: 'lyz' final int mod = LuaString.RECENT_STRINGS_CACHE_SIZE; assertEquals(abc1.hashCode() % 
mod, lyz1.hashCode() % mod); assertNotSame(abc1, lyz1); assertFalse(abc1.equals(lyz1)); assertSame(abc1, abc2); assertSame(lyz1, lyz2); } public void testRecentStringsCacheHashCollisionCacheMiss() { final byte[] abc = {'a', 'b', 'c' }; final byte[] lyz = {'l', 'y', 'z' }; // chosen to have hash collision with 'abc' final LuaString abc1 = LuaString.valueOf(abc); final LuaString lyz1 = LuaString.valueOf(lyz); // in cache: 'abc' final LuaString abc2 = LuaString.valueOf(abc); // in cache: 'lyz' final LuaString lyz2 = LuaString.valueOf(lyz); // in cache: 'abc' final int mod = LuaString.RECENT_STRINGS_CACHE_SIZE; assertEquals(abc1.hashCode() % mod, lyz1.hashCode() % mod); assertNotSame(abc1, lyz1); assertFalse(abc1.equals(lyz1)); assertNotSame(abc1, abc2); assertNotSame(lyz1, lyz2); } public void testRecentStringsLongStrings() { byte[] abc = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ".getBytes(); assertTrue(abc.length > LuaString.RECENT_STRINGS_MAX_LENGTH); LuaString abc1 = LuaString.valueOf(abc); LuaString abc2 = LuaString.valueOf(abc); assertNotSame(abc1, abc2); } public void testRecentStringsUsingJavaStrings() { final String abc = "abc"; final String lyz = "lyz"; // chosen to have hash collision with 'abc' final String xyz = "xyz"; final LuaString abc1 = LuaString.valueOf(abc); final LuaString abc2 = LuaString.valueOf(abc); final LuaString lyz1 = LuaString.valueOf(lyz); final LuaString lyz2 = LuaString.valueOf(lyz); final LuaString xyz1 = LuaString.valueOf(xyz); final LuaString xyz2 = LuaString.valueOf(xyz); final int mod = LuaString.RECENT_STRINGS_CACHE_SIZE; assertEquals(abc1.hashCode() % mod, lyz1.hashCode() % mod); assertFalse(abc1.hashCode() % mod == xyz1.hashCode() % mod); assertSame(abc1, abc2); assertSame(lyz1, lyz2); assertSame(xyz1, xyz2); final LuaString abc3 = LuaString.valueOf(abc); final LuaString lyz3 = LuaString.valueOf(lyz); final LuaString xyz3 = LuaString.valueOf(xyz); final LuaString abc4 = LuaString.valueOf(abc); final LuaString lyz4 = 
LuaString.valueOf(lyz); final LuaString xyz4 = LuaString.valueOf(xyz); assertNotSame(abc3, abc4); // because of hash collision assertNotSame(lyz3, lyz4); // because of hash collision assertSame(xyz3, xyz4); // because hashes do not collide } public void testLongSubstringGetsOldBacking() { LuaString src = LuaString.valueOf("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"); LuaString sub1 = src.substring(10, 40); assertSame(src.m_bytes, sub1.m_bytes); assertEquals(sub1.m_offset, 10); assertEquals(sub1.m_length, 30); } public void testShortSubstringGetsNewBacking() { LuaString src = LuaString.valueOf("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"); LuaString sub1 = src.substring(10, 20); LuaString sub2 = src.substring(10, 20); assertEquals(sub1.m_offset, 0); assertEquals(sub1.m_length, 10); assertSame(sub1, sub2); assertFalse(src.m_bytes == sub1.m_bytes); } }
Add test for long substring.
test/junit/org/luaj/vm2/StringTest.java
Add test for long substring.
<ide><path>est/junit/org/luaj/vm2/StringTest.java <ide> assertSame(sub1, sub2); <ide> assertFalse(src.m_bytes == sub1.m_bytes); <ide> } <add> <add> public void testShortSubstringOfVeryLongStringGetsNewBacking() { <add> LuaString src = LuaString.valueOf( <add> "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + <add> "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" ); <add> LuaString sub1 = src.substring(10, 50); <add> LuaString sub2 = src.substring(10, 50); <add> assertEquals(sub1.m_offset, 0); <add> assertEquals(sub1.m_length, 40); <add> assertFalse(sub1 == sub2); <add> assertFalse(src.m_bytes == sub1.m_bytes); <add> } <ide> }
JavaScript
mit
b56b3ca63f32eb01b9534e0a0d8b1fa2913cd40b
0
js-fns/date-fns,date-fns/date-fns,date-fns/date-fns,js-fns/date-fns,date-fns/date-fns
import path from 'path' import fs from 'fs' const files = fs.readdirSync(path.join(process.cwd(), 'src')) const propertyRequireLines = files .filter((file) => file.match(/\.js/)) .map((file) => file.replace(/\.js/, '')) .map((file) => ` ${camelize(file)}: require('./src/${file}')`) const indexLines = ['module.exports = {'] .concat(propertyRequireLines.join(',\n')) .concat('}') fs.writeFileSync(path.join(process.cwd(), 'index.js'), `${indexLines.join('\n')}\n`) function camelize(str) { return str .split('_') .map((word, index) => { if (index === 0) { return word } else if (word === 'iso') { return 'ISO' } else { return word.charAt(0).toUpperCase() + word.slice(1) } }) .join('') }
scripts/generate_index.js
import path from 'path' import fs from 'fs' const files = fs.readdirSync(path.join(process.cwd(), 'src')) const propertyRequireLines = files .filter((file) => file.match(/\.js/)) .map((file) => file.replace(/\.js/, '')) .map((file) => ` ${camelize(file)}: require('./src/${file}')`) const indexLines = ['module.exports = {'] .concat(propertyRequireLines.join(',\n')) .concat('}') fs.writeFileSync(path.join(process.cwd(), 'index.js'), `${indexLines.join('\n')}\n`) function camelize(str) { return str.replace(/[-_\s]+(.)?/g, (match, c) => c ? c.toUpperCase() : '') }
Fix scripts/generate_index.js so it correctly handles ISO week functions (closes #82)
scripts/generate_index.js
Fix scripts/generate_index.js so it correctly handles ISO week functions (closes #82)
<ide><path>cripts/generate_index.js <ide> fs.writeFileSync(path.join(process.cwd(), 'index.js'), `${indexLines.join('\n')}\n`) <ide> <ide> function camelize(str) { <del> return str.replace(/[-_\s]+(.)?/g, (match, c) => c ? c.toUpperCase() : '') <add> return str <add> .split('_') <add> .map((word, index) => { <add> if (index === 0) { <add> return word <add> } else if (word === 'iso') { <add> return 'ISO' <add> } else { <add> return word.charAt(0).toUpperCase() + word.slice(1) <add> } <add> }) <add> .join('') <ide> }
Java
apache-2.0
351cb517e2e6e5c357fb52c46b32bc527785a7ee
0
stanfy/enroscar,stanfy/enroscar
package com.stanfy.enroscar.goro; import android.test.AndroidTestCase; import android.test.FlakyTest; import com.stanfy.enroscar.async.AsyncObserver; import com.stanfy.enroscar.goro.support.AsyncGoro; import com.stanfy.enroscar.goro.support.RxGoro; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import rx.functions.Action0; import rx.functions.Action1; import static org.fest.assertions.api.Assertions.assertThat; /** * Test oneshot Goro usage. */ public class BoundGoroAndroidTest extends AndroidTestCase { private BoundGoro goro; private String res; private CountDownLatch scheduleSync; @Override protected void setUp() throws Exception { super.setUp(); GoroService.setDelegateExecutor(null); goro = Goro.bindWith(getContext()); res = "fail"; scheduleSync = prepareScheduleSync(); } private CountDownLatch prepareScheduleSync() throws InterruptedException { final CountDownLatch scheduleSync = new CountDownLatch(1); final CountDownLatch listenerSync = new CountDownLatch(1); GoroServiceAndroidTest.onMainThread(new Runnable() { @Override public void run() { goro.addTaskListener(new GoroListener() { @Override public void onTaskSchedule(Callable<?> task, String queue) { scheduleSync.countDown(); } @Override public void onTaskStart(Callable<?> task) { } @Override public void onTaskFinish(Callable<?> task, Object result) { } @Override public void onTaskCancel(Callable<?> task) { } @Override public void onTaskError(Callable<?> task, Throwable error) { } }); listenerSync.countDown(); } }); listenerSync.await(); return scheduleSync; } @FlakyTest public void testScheduleBindGet() { Future<?> future = goro.schedule(new Callable<Object>() { @Override public Object call() throws Exception { throw new Exception("aha!"); } }); goro.bindOneshot(); awaitScheduling(); try { future.get(10, 
TimeUnit.SECONDS); fail("Exception expected"); } catch (InterruptedException e) { throw new AssertionError(e); } catch (ExecutionException e) { assertEquals("aha!", e.getCause().getMessage()); } catch (TimeoutException e) { fail("Not executed"); } // unbound? // FIXME: bindOneShot is still flaky // assertNull(((BoundGoro.BoundGoroImpl) goro).getServiceObject()); } private void awaitScheduling() { try { assertThat(scheduleSync.await(20, TimeUnit.SECONDS)).isTrue(); } catch (InterruptedException e) { throw new AssertionError(e); } } @FlakyTest public void testScheduleObserveBind() { final CountDownLatch sync = new CountDownLatch(1); goro.schedule(new Callable<String>() { @Override public String call() throws Exception { return "ok"; } }).subscribe(new FutureObserver<String>() { @Override public void onSuccess(String value) { res = value; sync.countDown(); } @Override public void onError(Throwable error) { fail(error.getMessage()); } }); goro.bindOneshot(); awaitScheduling(); await(sync); // unbound? 
assertNull(((BoundGoro.BoundGoroImpl) goro).getServiceObject()); assertEquals("ok", res); } public void testWithAsyncIntegration() { final CountDownLatch sync = new CountDownLatch(1); new AsyncGoro(goro).schedule(new Callable<String>() { @Override public String call() throws Exception { return "async"; } }).subscribe(new AsyncObserver<String>() { @Override public void onError(final Throwable e) { throw new AssertionError(e); } @Override public void onResult(final String data) { res = data; sync.countDown(); } @Override public void onReset() { // nothing } }); goro.bindOneshot(); awaitScheduling(); await(sync); assertEquals("async", res); } public void testWithRxIntegration() { final CountDownLatch sync = new CountDownLatch(2); new RxGoro(goro).schedule(new Callable<String>() { @Override public String call() throws Exception { return "rx"; } }).doOnCompleted(new Action0() { @Override public void call() { sync.countDown(); } }).subscribe(new Action1<String>() { @Override public void call(String o) { res = o; sync.countDown(); } }); goro.bindOneshot(); awaitScheduling(); await(sync); assertEquals("rx", res); } private void await(CountDownLatch sync) { try { assertEquals(true, sync.await(10, TimeUnit.SECONDS)); } catch (InterruptedException e) { throw new AssertionError(e); } } }
goro/core/src/androidTest/java/com/stanfy/enroscar/goro/BoundGoroAndroidTest.java
package com.stanfy.enroscar.goro; import android.test.AndroidTestCase; import android.test.FlakyTest; import com.google.android.apps.common.testing.ui.espresso.Espresso; import com.stanfy.enroscar.async.AsyncObserver; import com.stanfy.enroscar.goro.support.AsyncGoro; import com.stanfy.enroscar.goro.support.RxGoro; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import rx.functions.Action0; import rx.functions.Action1; import static org.fest.assertions.api.Assertions.assertThat; /** * Test oneshot Goro usage. */ public class BoundGoroAndroidTest extends AndroidTestCase { private BoundGoro goro; private String res; private CountDownLatch scheduleSync; @Override protected void setUp() throws Exception { super.setUp(); GoroService.setDelegateExecutor(null); goro = Goro.bindWith(getContext()); res = "fail"; scheduleSync = prepareScheduleSync(); } private CountDownLatch prepareScheduleSync() throws InterruptedException { final CountDownLatch scheduleSync = new CountDownLatch(1); final CountDownLatch listenerSync = new CountDownLatch(1); GoroServiceAndroidTest.onMainThread(new Runnable() { @Override public void run() { goro.addTaskListener(new GoroListener() { @Override public void onTaskSchedule(Callable<?> task, String queue) { scheduleSync.countDown(); } @Override public void onTaskStart(Callable<?> task) { } @Override public void onTaskFinish(Callable<?> task, Object result) { } @Override public void onTaskCancel(Callable<?> task) { } @Override public void onTaskError(Callable<?> task, Throwable error) { } }); listenerSync.countDown(); } }); listenerSync.await(); return scheduleSync; } @FlakyTest public void testScheduleBindGet() { Future<?> future = goro.schedule(new Callable<Object>() { @Override public Object call() throws Exception { throw new Exception("aha!"); } 
}); goro.bindOneshot(); awaitScheduling(); try { future.get(10, TimeUnit.SECONDS); fail("Exception expected"); } catch (InterruptedException e) { throw new AssertionError(e); } catch (ExecutionException e) { assertEquals("aha!", e.getCause().getMessage()); } catch (TimeoutException e) { fail("Not executed"); } // unbound? // FIXME: bindOneShot is still flaky // assertNull(((BoundGoro.BoundGoroImpl) goro).getServiceObject()); } private void awaitScheduling() { try { assertThat(scheduleSync.await(20, TimeUnit.SECONDS)).isTrue(); } catch (InterruptedException e) { throw new AssertionError(e); } } @FlakyTest public void testScheduleObserveBind() { final CountDownLatch sync = new CountDownLatch(1); goro.schedule(new Callable<String>() { @Override public String call() throws Exception { return "ok"; } }).subscribe(new FutureObserver<String>() { @Override public void onSuccess(String value) { res = value; sync.countDown(); } @Override public void onError(Throwable error) { fail(error.getMessage()); } }); goro.bindOneshot(); awaitScheduling(); await(sync); // unbound? 
assertNull(((BoundGoro.BoundGoroImpl) goro).getServiceObject()); assertEquals("ok", res); } public void testWithAsyncIntegration() { final CountDownLatch sync = new CountDownLatch(1); new AsyncGoro(goro).schedule(new Callable<String>() { @Override public String call() throws Exception { return "async"; } }).subscribe(new AsyncObserver<String>() { @Override public void onError(final Throwable e) { throw new AssertionError(e); } @Override public void onResult(final String data) { res = data; sync.countDown(); } }); goro.bindOneshot(); awaitScheduling(); await(sync); assertEquals("async", res); } public void testWithRxIntegration() { final CountDownLatch sync = new CountDownLatch(2); new RxGoro(goro).schedule(new Callable<String>() { @Override public String call() throws Exception { return "rx"; } }).doOnCompleted(new Action0() { @Override public void call() { sync.countDown(); } }).subscribe(new Action1<String>() { @Override public void call(String o) { res = o; sync.countDown(); } }); goro.bindOneshot(); awaitScheduling(); await(sync); assertEquals("rx", res); } private void await(CountDownLatch sync) { try { assertEquals(true, sync.await(10, TimeUnit.SECONDS)); } catch (InterruptedException e) { throw new AssertionError(e); } } }
fix test compilation
goro/core/src/androidTest/java/com/stanfy/enroscar/goro/BoundGoroAndroidTest.java
fix test compilation
<ide><path>oro/core/src/androidTest/java/com/stanfy/enroscar/goro/BoundGoroAndroidTest.java <ide> import android.test.AndroidTestCase; <ide> import android.test.FlakyTest; <ide> <del>import com.google.android.apps.common.testing.ui.espresso.Espresso; <ide> import com.stanfy.enroscar.async.AsyncObserver; <ide> import com.stanfy.enroscar.goro.support.AsyncGoro; <ide> import com.stanfy.enroscar.goro.support.RxGoro; <ide> res = data; <ide> sync.countDown(); <ide> } <add> @Override <add> public void onReset() { <add> // nothing <add> } <ide> }); <ide> <ide> goro.bindOneshot();
Java
bsd-3-clause
315d1cafd4c2427c125769376f7f0c0653e74853
0
bmc/curn,songfj/curn,songfj/curn,songfj/curn,songfj/curn,bmc/curn,bmc/curn,songfj/curn,bmc/curn,bmc/curn
/*---------------------------------------------------------------------------*\ $Id$ --------------------------------------------------------------------------- This software is released under a Berkeley-style license: Copyright (c) 2004-2006 Brian M. Clapper. All rights reserved. Redistribution and use in source and binary forms are permitted provided that: (1) source distributions retain this entire copyright notice and comment; and (2) modifications made to the software are prominently mentioned, and a copy of the original software (or a pointer to its location) are included. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. Effectively, this means you can do what you want with the software except remove this notice or take advantage of the author's name. If you modify the software and redistribute your modified version, you must indicate that your version is a modification of the original, and you must provide either a pointer to or a copy of the original. 
\*---------------------------------------------------------------------------*/ package org.clapper.curn; import java.io.IOException; import java.io.File; import java.io.StringWriter; import java.io.PrintWriter; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Set; import org.clapper.curn.parser.RSSChannel; import org.clapper.util.cmdline.CommandLineUtility; import org.clapper.util.cmdline.CommandLineException; import org.clapper.util.cmdline.CommandLineUsageException; import org.clapper.util.cmdline.UsageInfo; import org.clapper.util.config.ConfigurationException; import org.clapper.util.io.WordWrapWriter; import org.clapper.util.logging.Logger; import org.clapper.util.misc.BuildInfo; /** * <p><i>curn</i>: Customizable Utilitarian RSS Notifier.</p> * * <p><i>curn</i> is an RSS reader. It scans a configured set of URLs, each * one representing an RSS feed, and summarizes the results in an * easy-to-read text format. <i>curn</i> keeps track of URLs it's seen * before, using an on-disk cache; when using the cache, it will suppress * displaying URLs it has already reported (though that behavior can be * disabled). <i>curn</i> can be extended to use any RSS parser; its * built-in RSS parser, the * {@link org.clapper.curn.parser.minirss.MiniRSSParser MiniRSSParser} * class, can handle files in * {@link <a href="http://www.atomenabled.org/developers/">Atom</a>} * format (0.3) and RSS formats * {@link <a target="_top" href="http://backend.userland.com/rss091">0.91</a>}, * 0.92, * {@link <a target="_top" href="http://web.resource.org/rss/1.0/">1.0</a>} and * {@link <a target="_top" href="http://blogs.law.harvard.edu/tech/rss">2.0</a>}.</p> * * <p>This class is a command-line wrapper for <i>curn</i>. 
Run it with * no parameters for a usage summary.</p> * * @version <tt>$Revision$</tt> */ public class Tool extends CommandLineUtility implements PostConfigPlugIn { /*----------------------------------------------------------------------*\ Private Constants \*----------------------------------------------------------------------*/ private static Collection<DateParseInfo> DATE_FORMATS; static { DATE_FORMATS = new ArrayList<DateParseInfo>(); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd hh:mm:ss a", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd hh:mm:ss", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd hh:mm a", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd hh:mm", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd h:mm a", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd h:mm", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd hh a", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd h a", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd HH:mm:ss", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd HH:mm", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd H:mm", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd", false)); DATE_FORMATS.add (new DateParseInfo ("yy/MM/dd", false)); DATE_FORMATS.add (new DateParseInfo ("hh:mm:ss a", true)); DATE_FORMATS.add (new DateParseInfo ("hh:mm:ss", true)); DATE_FORMATS.add (new DateParseInfo ("hh:mm a", true)); DATE_FORMATS.add (new DateParseInfo ("hh:mm", true)); DATE_FORMATS.add (new DateParseInfo ("h:mm a", true)); DATE_FORMATS.add (new DateParseInfo ("h:mm", true)); DATE_FORMATS.add (new DateParseInfo ("hh a", true)); DATE_FORMATS.add (new DateParseInfo ("h a", true)); DATE_FORMATS.add (new DateParseInfo ("HH:mm:ss a", true)); DATE_FORMATS.add (new DateParseInfo ("HH:mm:ss", true)); DATE_FORMATS.add (new DateParseInfo ("HH:mm a", true)); DATE_FORMATS.add (new DateParseInfo ("HH:mm", true)); DATE_FORMATS.add (new DateParseInfo 
("H:mm a", true)); DATE_FORMATS.add (new DateParseInfo ("H:mm", true)); }; /*----------------------------------------------------------------------*\ Private Data Items \*----------------------------------------------------------------------*/ private String configPath = null; private boolean useCache = true; private Date currentTime = new Date(); private boolean optShowBuildInfo = false; private boolean optShowVersion = false; private Boolean optShowDates = null; private Boolean optShowAuthors = null; private Boolean optRSSVersion = null; private Boolean optUpdateCache = null; private boolean useGzip = true; private int maxThreads = 0; /** * For log messages */ private static Logger log = new Logger (Tool.class); /*----------------------------------------------------------------------*\ Main Program \*----------------------------------------------------------------------*/ public static void main (String[] args) { Tool tool = new Tool(); try { tool.execute (args); } catch (CommandLineUsageException ex) { // Already reported System.exit (1); } catch (CommandLineException ex) { WordWrapWriter err = new WordWrapWriter (System.err); //err.println (ex.getMessage()); ex.printStackTrace (System.err); System.exit (1); } catch (Exception ex) { ex.printStackTrace (System.err); System.exit (1); } } /*----------------------------------------------------------------------*\ Constructor \*----------------------------------------------------------------------*/ private Tool() { } /*----------------------------------------------------------------------*\ Public Methods Required by PlugIn Interface \*----------------------------------------------------------------------*/ public String getName() { return "curn command-line interface"; } public void runPostConfigPlugIn (CurnConfig config) throws CurnException { try { adjustConfiguration (config); } catch (ConfigurationException ex) { throw new CurnException (ex); } } 
/*----------------------------------------------------------------------*\ Protected Methods \*----------------------------------------------------------------------*/ /** * Called by <tt>parseParams()</tt> to handle any option it doesn't * recognize. If the option takes any parameters, the overridden * method must extract the parameter by advancing the supplied * <tt>Iterator</tt> (which returns <tt>String</tt> objects). This * default method simply throws an exception. * * @param shortOption the short option character, or * {@link UsageInfo#NO_SHORT_OPTION} if there isn't * one (i.e., if this is a long-only option). * @param longOption the long option string, without any leading * "-" characters, or null if this is a short-only * option * @param it the <tt>Iterator</tt> for the remainder of the * command line, for extracting parameters. * * @throws CommandLineUsageException on error * @throws NoSuchElementException overran the iterator (i.e., missing * parameter) */ protected void parseCustomOption (char shortOption, String longOption, Iterator<String> it) throws CommandLineUsageException, NoSuchElementException { switch (shortOption) { case 'a': // --authors optShowAuthors = Boolean.TRUE; break; case 'A': // --no-authors optShowAuthors = Boolean.FALSE; break; case 'B': // --build-info optShowBuildInfo = true; break; case 'C': // --no-cache useCache = false; break; case 'd': // --show-dates optShowDates = Boolean.TRUE; break; case 'D': // --no-dates optShowDates = Boolean.FALSE; break; case 'r': // --rss-version optRSSVersion = Boolean.TRUE; break; case 'R': // --no-rss-version optRSSVersion = Boolean.FALSE; break; case 't': // --time currentTime = parseDateTime (it.next()); break; case 'T': // --threads String arg = it.next(); try { maxThreads = Integer.parseInt (arg); if (maxThreads < 1) { throw new CommandLineUsageException (Constants.BUNDLE_NAME, "Tool.negNumericOption", "Value for \"{0}\" (\"{1}\") option must be " + "greater than 0", new Object[] {"-t", 
"--threads"}); } } catch (NumberFormatException ex) { throw new CommandLineUsageException (Constants.BUNDLE_NAME, "Tool.badNumericOption", "Bad numeric value \"{0}\" for \"{1}\" (\"{2}\") " + "option", new Object[] { arg, "-T", "--threads" }); } break; case 'u': // --no-update optUpdateCache = Boolean.FALSE; break; case 'v': optShowVersion = true; break; case 'z': // --gzip useGzip = true; break; case 'Z': // --no-gzip useGzip = false; break; default: // Should not happen. throw new IllegalStateException ("(BUG) Unknown option. " + "Why am I here?"); } } /** * <p>Called by <tt>parseParams()</tt> once option parsing is complete, * this method must handle any additional parameters on the command * line. It's not necessary for the method to ensure that the iterator * has the right number of strings left in it. If you attempt to pull * too many parameters from the iterator, it'll throw a * <tt>NoSuchElementException</tt>, which <tt>parseParams()</tt> traps * and converts into a suitable error message. Similarly, if there are * any parameters left in the iterator when this method returns, * <tt>parseParams()</tt> throws an exception indicating that there are * too many parameters on the command line.</p> * * <p>This method is called unconditionally, even if there are no * parameters left on the command line, so it's a useful place to do * post-option consistency checks, as well.</p> * * @param it the <tt>Iterator</tt> for the remainder of the * command line * * @throws CommandLineUsageException on error * @throws NoSuchElementException attempt to iterate past end of args; * <tt>parseParams()</tt> automatically * handles this exception, so it's * safe for subclass implementations of * this method not to handle it */ protected void processPostOptionCommandLine (Iterator<String> it) throws CommandLineUsageException, NoSuchElementException { // If we're showing build information or the version, forget about // the remainder of the command line. if (! 
(optShowBuildInfo || optShowVersion)) configPath = it.next(); } /** * Called by <tt>parseParams()</tt> to get the custom command-line * options and parameters handled by the subclass. This list is used * solely to build a usage message. The overridden method must fill the * supplied <tt>UsageInfo</tt> object: * * <ul> * <li> Each parameter must be added to the object, via the * <tt>UsageInfo.addParameter()</tt> method. The first argument * to <tt>addParameter()</tt> is the parameter string (e.g., * "<dbCfg>" or "input_file"). The second parameter is the * one-line description. The description may be of any length, * but it should be a single line. * * <li> Each option must be added to the object, via the * <tt>UsageInfo.addOption()</tt> method. The first argument to * <tt>addOption()</tt> is the option string (e.g., "-x" or * "-version"). The second parameter is the one-line * description. The description may be of any length, but it * should be a single line. * </ul> * * That information will be combined with the common options supported * by the base class, and used to build a usage message. * * @param info The <tt>UsageInfo</tt> object to fill. */ protected void getCustomUsageInfo (UsageInfo info) { info.addOption ('a', "show-authors", "Show the authors for each item, if available."); info.addOption ('A', "no-authors", "Don't the authors for each item, if available."); info.addOption ('B', "build-info", "Show full build information, then exit. 
" + "This option shows a bit more information than the " + UsageInfo.LONG_OPTION_PREFIX + "version option"); info.addOption ('C', "no-cache", "Don't use a cache file at all."); info.addOption ('d', "show-dates", "Show dates on feeds and feed items, if available."); info.addOption ('D', "no-dates", "Don't show dates on feeds and feed items."); info.addOption ('r', "rss-version", "Show the RSS version each site uses."); info.addOption ('R', "no-rss-version", "Don't show the RSS version each site uses."); info.addOption ('T', "threads", "<n>", "Set the number of concurrent download threads to " + "<n>. <n> must be greater than 0."); info.addOption ('u', "no-update", "Read the cache, but don't update it."); info.addOption ('v', "version", "Show version information, then exit."); info.addOption ('z', "gzip", "Ask remote HTTP servers to gzip content before " + "sending it."); info.addOption ('Z', "no-gzip", "Don't ask remote HTTP servers to gzip content before " + "sending it."); StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter (sw); Date sampleDate; BuildInfo buildInfo = Version.getBuildInfo(); SimpleDateFormat dateFormat; String dateString = buildInfo.getBuildDate(); try { dateFormat = new SimpleDateFormat (BuildInfo.DATE_FORMAT_STRING); sampleDate = dateFormat.parse (dateString); } catch (Exception ex) { log.error ("Can't parse build date string \"" + dateString + "\" using format \"" + BuildInfo.DATE_FORMAT_STRING + "\"", ex); sampleDate = new Date(); } Set<String> printed = new HashSet<String>(); for (DateParseInfo dpi : DATE_FORMATS) { String s = dpi.formatDate (sampleDate); if (! printed.contains (s)) { pw.println(); pw.print (s); printed.add (s); } } info.addOption ('t', "time", "<time>", "For the purposes of cache expiration, pretend the " + "current time is <time>. <time> may be in one of the " + "following formats." + sw.toString()); info.addParameter ("config", "Path to configuration file", true); } /** * Run the curn tool. 
This method parses the command line arguments, * storing the results in an internal configuration; then, it * instantiates a <tt>Curn</tt> object and calls its <tt>run()</tt> * method. * * @throws CommandLineException error occurred */ protected void runCommand() throws CommandLineException { try { if (optShowBuildInfo) Version.showBuildInfo(); else if (optShowVersion) Version.showVersion(); else { // Allocate Curn object, which loads plug-ins. Curn curn = CurnFactory.newCurn(); // Add this object as a plug-in. MetaPlugIn.getMetaPlugIn().addPlugIn (this); // Fire it up. curn.setCurrentTime (currentTime); curn.run (configPath, this.useCache); } } catch (CurnException ex) { throw new CommandLineException (ex); } catch (Exception ex) { ex.printStackTrace (System.err); throw new CommandLineException (ex); } } /*----------------------------------------------------------------------*\ Private Methods \*----------------------------------------------------------------------*/ private void adjustConfiguration (CurnConfig config) throws ConfigurationException { log.debug ("adjustConfiguration() called."); // Adjust the configuration, if necessary, based on the command-line // parameters. if (optShowAuthors != null) config.setShowAuthorsFlag (optShowAuthors.booleanValue()); if (optRSSVersion != null) config.setShowRSSVersionFlag (optRSSVersion.booleanValue()); if (optUpdateCache != null) config.setMustUpdateCacheFlag (optUpdateCache.booleanValue()); if (optShowDates != null) config.setShowDatesFlag (true); if (maxThreads > 0) config.setMaxThreads (maxThreads); config.setRetrieveFeedsWithGzipFlag (useGzip); } private Date parseDateTime (String s) throws CommandLineUsageException { Date date = null; for (DateParseInfo dpi : DATE_FORMATS) { try { date = dpi.format.parse (s); if (date != null) { if (dpi.timeOnly) { // The date pattern specified only a time, which // means the date part defaulted to the epoch. Make // it today, instead. 
Calendar cal = Calendar.getInstance(); Calendar calNow = Calendar.getInstance(); calNow.setTime (new Date()); cal.setTime (date); cal.set (calNow.get (Calendar.YEAR), calNow.get (Calendar.MONTH), calNow.get (Calendar.DAY_OF_MONTH)); date = cal.getTime(); } break; } } catch (ParseException ex) { } } if (date == null) { throw new CommandLineUsageException (Constants.BUNDLE_NAME, "Tool.badDateTime", "Bad date/time: \"{0}\"", new Object[] {s}); } return date; } }
src/org/clapper/curn/Tool.java
/*---------------------------------------------------------------------------*\ $Id$ --------------------------------------------------------------------------- This software is released under a Berkeley-style license: Copyright (c) 2004-2006 Brian M. Clapper. All rights reserved. Redistribution and use in source and binary forms are permitted provided that: (1) source distributions retain this entire copyright notice and comment; and (2) modifications made to the software are prominently mentioned, and a copy of the original software (or a pointer to its location) are included. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. Effectively, this means you can do what you want with the software except remove this notice or take advantage of the author's name. If you modify the software and redistribute your modified version, you must indicate that your version is a modification of the original, and you must provide either a pointer to or a copy of the original. 
\*---------------------------------------------------------------------------*/ package org.clapper.curn; import java.io.IOException; import java.io.File; import java.io.StringWriter; import java.io.PrintWriter; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Set; import org.clapper.curn.parser.RSSChannel; import org.clapper.util.cmdline.CommandLineUtility; import org.clapper.util.cmdline.CommandLineException; import org.clapper.util.cmdline.CommandLineUsageException; import org.clapper.util.cmdline.UsageInfo; import org.clapper.util.config.ConfigurationException; import org.clapper.util.io.WordWrapWriter; import org.clapper.util.logging.Logger; import org.clapper.util.misc.BuildInfo; /** * <p><i>curn</i>: Customizable Utilitarian RSS Notifier.</p> * * <p><i>curn</i> is an RSS reader. It scans a configured set of URLs, each * one representing an RSS feed, and summarizes the results in an * easy-to-read text format. <i>curn</i> keeps track of URLs it's seen * before, using an on-disk cache; when using the cache, it will suppress * displaying URLs it has already reported (though that behavior can be * disabled). <i>curn</i> can be extended to use any RSS parser; its * built-in RSS parser, the * {@link org.clapper.curn.parser.minirss.MiniRSSParser MiniRSSParser} * class, can handle files in * {@link <a href="http://www.atomenabled.org/developers/">Atom</a>} * format (0.3) and RSS formats * {@link <a target="_top" href="http://backend.userland.com/rss091">0.91</a>}, * 0.92, * {@link <a target="_top" href="http://web.resource.org/rss/1.0/">1.0</a>} and * {@link <a target="_top" href="http://blogs.law.harvard.edu/tech/rss">2.0</a>}.</p> * * <p>This class is a command-line wrapper for <i>curn</i>. 
Run it with * no parameters for a usage summary.</p> * * @version <tt>$Revision$</tt> */ public class Tool extends CommandLineUtility implements PostConfigPlugIn { /*----------------------------------------------------------------------*\ Private Constants \*----------------------------------------------------------------------*/ private static Collection<DateParseInfo> DATE_FORMATS; static { DATE_FORMATS = new ArrayList<DateParseInfo>(); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd hh:mm:ss a", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd hh:mm:ss", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd hh:mm a", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd hh:mm", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd h:mm a", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd h:mm", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd hh a", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd h a", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd HH:mm:ss", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd HH:mm", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd H:mm", false)); DATE_FORMATS.add (new DateParseInfo ("yyyy/MM/dd", false)); DATE_FORMATS.add (new DateParseInfo ("yy/MM/dd", false)); DATE_FORMATS.add (new DateParseInfo ("hh:mm:ss a", true)); DATE_FORMATS.add (new DateParseInfo ("hh:mm:ss", true)); DATE_FORMATS.add (new DateParseInfo ("hh:mm a", true)); DATE_FORMATS.add (new DateParseInfo ("hh:mm", true)); DATE_FORMATS.add (new DateParseInfo ("h:mm a", true)); DATE_FORMATS.add (new DateParseInfo ("h:mm", true)); DATE_FORMATS.add (new DateParseInfo ("hh a", true)); DATE_FORMATS.add (new DateParseInfo ("h a", true)); DATE_FORMATS.add (new DateParseInfo ("HH:mm:ss a", true)); DATE_FORMATS.add (new DateParseInfo ("HH:mm:ss", true)); DATE_FORMATS.add (new DateParseInfo ("HH:mm a", true)); DATE_FORMATS.add (new DateParseInfo ("HH:mm", true)); DATE_FORMATS.add (new DateParseInfo 
("H:mm a", true)); DATE_FORMATS.add (new DateParseInfo ("H:mm", true)); }; /*----------------------------------------------------------------------*\ Private Data Items \*----------------------------------------------------------------------*/ private String configPath = null; private boolean useCache = true; private Date currentTime = new Date(); private Collection<String> emailAddresses = new ArrayList<String>(); private boolean optShowBuildInfo = false; private boolean optShowVersion = false; private Boolean optShowDates = null; private Boolean optShowAuthors = null; private Boolean optRSSVersion = null; private Boolean optUpdateCache = null; private boolean useGzip = true; private int maxThreads = 0; /** * For log messages */ private static Logger log = new Logger (Tool.class); /*----------------------------------------------------------------------*\ Main Program \*----------------------------------------------------------------------*/ public static void main (String[] args) { Tool tool = new Tool(); try { tool.execute (args); } catch (CommandLineUsageException ex) { // Already reported System.exit (1); } catch (CommandLineException ex) { WordWrapWriter err = new WordWrapWriter (System.err); //err.println (ex.getMessage()); ex.printStackTrace (System.err); System.exit (1); } catch (Exception ex) { ex.printStackTrace (System.err); System.exit (1); } } /*----------------------------------------------------------------------*\ Constructor \*----------------------------------------------------------------------*/ private Tool() { } /*----------------------------------------------------------------------*\ Public Methods Required by PlugIn Interface \*----------------------------------------------------------------------*/ public String getName() { return "curn command-line interface"; } public void runPostConfigurationPlugIn (CurnConfig config) throws CurnException { try { adjustConfiguration (config); } catch (ConfigurationException ex) { throw new CurnException 
(ex); } } /*----------------------------------------------------------------------*\ Protected Methods \*----------------------------------------------------------------------*/ /** * Called by <tt>parseParams()</tt> to handle any option it doesn't * recognize. If the option takes any parameters, the overridden * method must extract the parameter by advancing the supplied * <tt>Iterator</tt> (which returns <tt>String</tt> objects). This * default method simply throws an exception. * * @param shortOption the short option character, or * {@link UsageInfo#NO_SHORT_OPTION} if there isn't * one (i.e., if this is a long-only option). * @param longOption the long option string, without any leading * "-" characters, or null if this is a short-only * option * @param it the <tt>Iterator</tt> for the remainder of the * command line, for extracting parameters. * * @throws CommandLineUsageException on error * @throws NoSuchElementException overran the iterator (i.e., missing * parameter) */ protected void parseCustomOption (char shortOption, String longOption, Iterator<String> it) throws CommandLineUsageException, NoSuchElementException { switch (shortOption) { case 'a': // --authors optShowAuthors = Boolean.TRUE; break; case 'A': // --no-authors optShowAuthors = Boolean.FALSE; break; case 'B': // --build-info optShowBuildInfo = true; break; case 'C': // --no-cache useCache = false; break; case 'd': // --show-dates optShowDates = Boolean.TRUE; break; case 'D': // --no-dates optShowDates = Boolean.FALSE; break; case 'r': // --rss-version optRSSVersion = Boolean.TRUE; break; case 'R': // --no-rss-version optRSSVersion = Boolean.FALSE; break; case 't': // --time currentTime = parseDateTime (it.next()); break; case 'T': // --threads String arg = it.next(); try { maxThreads = Integer.parseInt (arg); if (maxThreads < 1) { throw new CommandLineUsageException (Constants.BUNDLE_NAME, "Tool.negNumericOption", "Value for \"{0}\" (\"{1}\") option must be " + "greater than 0", new Object[] 
{"-t", "--threads"}); } } catch (NumberFormatException ex) { throw new CommandLineUsageException (Constants.BUNDLE_NAME, "Tool.badNumericOption", "Bad numeric value \"{0}\" for \"{1}\" (\"{2}\") " + "option", new Object[] { arg, "-T", "--threads" }); } break; case 'u': // --no-update optUpdateCache = Boolean.FALSE; break; case 'v': optShowVersion = true; break; case 'z': // --gzip useGzip = true; break; case 'Z': // --no-gzip useGzip = false; break; default: // Should not happen. throw new IllegalStateException ("(BUG) Unknown option. " + "Why am I here?"); } } /** * <p>Called by <tt>parseParams()</tt> once option parsing is complete, * this method must handle any additional parameters on the command * line. It's not necessary for the method to ensure that the iterator * has the right number of strings left in it. If you attempt to pull * too many parameters from the iterator, it'll throw a * <tt>NoSuchElementException</tt>, which <tt>parseParams()</tt> traps * and converts into a suitable error message. Similarly, if there are * any parameters left in the iterator when this method returns, * <tt>parseParams()</tt> throws an exception indicating that there are * too many parameters on the command line.</p> * * <p>This method is called unconditionally, even if there are no * parameters left on the command line, so it's a useful place to do * post-option consistency checks, as well.</p> * * @param it the <tt>Iterator</tt> for the remainder of the * command line * * @throws CommandLineUsageException on error * @throws NoSuchElementException attempt to iterate past end of args; * <tt>parseParams()</tt> automatically * handles this exception, so it's * safe for subclass implementations of * this method not to handle it */ protected void processPostOptionCommandLine (Iterator<String> it) throws CommandLineUsageException, NoSuchElementException { // If we're showing build information or the version, forget about // the remainder of the command line. if (! 
(optShowBuildInfo || optShowVersion)) { configPath = it.next(); while (it.hasNext()) emailAddresses.add (it.next()); } } /** * Called by <tt>parseParams()</tt> to get the custom command-line * options and parameters handled by the subclass. This list is used * solely to build a usage message. The overridden method must fill the * supplied <tt>UsageInfo</tt> object: * * <ul> * <li> Each parameter must be added to the object, via the * <tt>UsageInfo.addParameter()</tt> method. The first argument * to <tt>addParameter()</tt> is the parameter string (e.g., * "<dbCfg>" or "input_file"). The second parameter is the * one-line description. The description may be of any length, * but it should be a single line. * * <li> Each option must be added to the object, via the * <tt>UsageInfo.addOption()</tt> method. The first argument to * <tt>addOption()</tt> is the option string (e.g., "-x" or * "-version"). The second parameter is the one-line * description. The description may be of any length, but it * should be a single line. * </ul> * * That information will be combined with the common options supported * by the base class, and used to build a usage message. * * @param info The <tt>UsageInfo</tt> object to fill. */ protected void getCustomUsageInfo (UsageInfo info) { info.addOption ('a', "show-authors", "Show the authors for each item, if available."); info.addOption ('A', "no-authors", "Don't the authors for each item, if available."); info.addOption ('B', "build-info", "Show full build information, then exit. 
" + "This option shows a bit more information than the " + UsageInfo.LONG_OPTION_PREFIX + "version option"); info.addOption ('C', "no-cache", "Don't use a cache file at all."); info.addOption ('d', "show-dates", "Show dates on feeds and feed items, if available."); info.addOption ('D', "no-dates", "Don't show dates on feeds and feed items."); info.addOption ('r', "rss-version", "Show the RSS version each site uses."); info.addOption ('R', "no-rss-version", "Don't show the RSS version each site uses."); info.addOption ('T', "threads", "<n>", "Set the number of concurrent download threads to " + "<n>. <n> must be greater than 0."); info.addOption ('u', "no-update", "Read the cache, but don't update it."); info.addOption ('v', "version", "Show version information, then exit."); info.addOption ('z', "gzip", "Ask remote HTTP servers to gzip content before " + "sending it."); info.addOption ('Z', "no-gzip", "Don't ask remote HTTP servers to gzip content before " + "sending it."); StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter (sw); Date sampleDate; BuildInfo buildInfo = Version.getBuildInfo(); SimpleDateFormat dateFormat; String dateString = buildInfo.getBuildDate(); try { dateFormat = new SimpleDateFormat (BuildInfo.DATE_FORMAT_STRING); sampleDate = dateFormat.parse (dateString); } catch (Exception ex) { log.error ("Can't parse build date string \"" + dateString + "\" using format \"" + BuildInfo.DATE_FORMAT_STRING + "\"", ex); sampleDate = new Date(); } Set<String> printed = new HashSet<String>(); for (DateParseInfo dpi : DATE_FORMATS) { String s = dpi.formatDate (sampleDate); if (! printed.contains (s)) { pw.println(); pw.print (s); printed.add (s); } } info.addOption ('t', "time", "<time>", "For the purposes of cache expiration, pretend the " + "current time is <time>. <time> may be in one of the " + "following formats." 
+ sw.toString()); info.addParameter ("config", "Path to configuration file", true); info.addParameter ("emailAddress ...", "One or more email addresses to receive the output", false); } /** * Run the curn tool. This method parses the command line arguments, * storing the results in an internal configuration; then, it * instantiates a <tt>Curn</tt> object and calls its <tt>run()</tt> * method. * * @throws CommandLineException error occurred */ protected void runCommand() throws CommandLineException { try { if (optShowBuildInfo) Version.showBuildInfo(); else if (optShowVersion) Version.showVersion(); else { // Allocate Curn object, which loads plug-ins. Curn curn = CurnFactory.newCurn(); // Add this object as a plug-in. MetaPlugIn.getMetaPlugIn().addPlugIn (this); // Fire it up. curn.setCurrentTime (currentTime); curn.run (configPath, this.emailAddresses, this.useCache); } } catch (CurnException ex) { throw new CommandLineException (ex); } catch (Exception ex) { ex.printStackTrace (System.err); throw new CommandLineException (ex); } } /*----------------------------------------------------------------------*\ Private Methods \*----------------------------------------------------------------------*/ private void adjustConfiguration (CurnConfig config) throws ConfigurationException { log.debug ("adjustConfiguration() called."); // Adjust the configuration, if necessary, based on the command-line // parameters. 
if (optShowAuthors != null) config.setShowAuthorsFlag (optShowAuthors.booleanValue()); if (optRSSVersion != null) config.setShowRSSVersionFlag (optRSSVersion.booleanValue()); if (optUpdateCache != null) config.setMustUpdateCacheFlag (optUpdateCache.booleanValue()); if (optShowDates != null) config.setShowDatesFlag (true); if (maxThreads > 0) config.setMaxThreads (maxThreads); config.setRetrieveFeedsWithGzipFlag (useGzip); } private Date parseDateTime (String s) throws CommandLineUsageException { Date date = null; for (DateParseInfo dpi : DATE_FORMATS) { try { date = dpi.format.parse (s); if (date != null) { if (dpi.timeOnly) { // The date pattern specified only a time, which // means the date part defaulted to the epoch. Make // it today, instead. Calendar cal = Calendar.getInstance(); Calendar calNow = Calendar.getInstance(); calNow.setTime (new Date()); cal.setTime (date); cal.set (calNow.get (Calendar.YEAR), calNow.get (Calendar.MONTH), calNow.get (Calendar.DAY_OF_MONTH)); date = cal.getTime(); } break; } } catch (ParseException ex) { } } if (date == null) { throw new CommandLineUsageException (Constants.BUNDLE_NAME, "Tool.badDateTime", "Bad date/time: \"{0}\"", new Object[] {s}); } return date; } }
Emailing output is now handled by a new plug-in. As a result: - curn no longer accepts email addresses on the command line. Instead, email addresses must be specified in the configuration file, using a new "MailOutputTo" configuration option in the main [curn] configuration section. This change was necessary to permit moving email handling into a plug-in. It's also more consistent, since the other email-related parameters (subject, SMTP host, sender) are specified in the configuration file. - curn no longer dumps the output of the first handler to standard output when not emailing output. - Added new PostOutputPlugIn plug-in phase.
src/org/clapper/curn/Tool.java
Emailing output is now handled by a new plug-in. As a result:
<ide><path>rc/org/clapper/curn/Tool.java <ide> private String configPath = null; <ide> private boolean useCache = true; <ide> private Date currentTime = new Date(); <del> private Collection<String> emailAddresses = new ArrayList<String>(); <ide> private boolean optShowBuildInfo = false; <ide> private boolean optShowVersion = false; <ide> private Boolean optShowDates = null; <ide> return "curn command-line interface"; <ide> } <ide> <del> public void runPostConfigurationPlugIn (CurnConfig config) <add> public void runPostConfigPlugIn (CurnConfig config) <ide> throws CurnException <ide> { <ide> try <ide> // the remainder of the command line. <ide> <ide> if (! (optShowBuildInfo || optShowVersion)) <del> { <ide> configPath = it.next(); <del> <del> while (it.hasNext()) <del> emailAddresses.add (it.next()); <del> } <ide> } <ide> <ide> /** <ide> info.addParameter ("config", <ide> "Path to configuration file", <ide> true); <del> info.addParameter ("emailAddress ...", <del> "One or more email addresses to receive the output", <del> false); <ide> } <ide> <ide> /** <ide> // Fire it up. <ide> <ide> curn.setCurrentTime (currentTime); <del> curn.run (configPath, this.emailAddresses, this.useCache); <add> curn.run (configPath, this.useCache); <ide> } <ide> } <ide>
Java
apache-2.0
5b5e234f33d07f502589f271df60d064e26379ed
0
dimone-kun/cuba,dimone-kun/cuba,cuba-platform/cuba,cuba-platform/cuba,cuba-platform/cuba,dimone-kun/cuba
/* * Copyright (c) 2012 Haulmont Technology Ltd. All Rights Reserved. * Haulmont Technology proprietary and confidential. * Use is subject to license terms. */ package com.haulmont.cuba.core.global; import com.haulmont.bali.util.ReflectionHelper; import com.haulmont.chile.core.annotations.NamePattern; import com.haulmont.chile.core.model.MetaClass; import com.haulmont.chile.core.model.MetaProperty; import com.haulmont.chile.core.model.Range; import com.haulmont.cuba.core.entity.BaseEntity; import com.haulmont.cuba.core.entity.Entity; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.BooleanUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.dom4j.Document; import org.dom4j.DocumentException; import org.dom4j.Element; import org.dom4j.io.SAXReader; import javax.annotation.Nullable; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.util.*; import java.util.concurrent.ConcurrentHashMap; /** * Class containing all views defined in XML and deployed at runtime.<br> * The reference to the ViewRepository can be obtained through {@link com.haulmont.cuba.core.global.Metadata}. * * @author krivopustov * @version $Id$ */ public class ViewRepository { private List<String> readFileNames = new LinkedList<String>(); private Map<MetaClass, Map<String, View>> storage = new ConcurrentHashMap<MetaClass, Map<String, View>>(); private Metadata metadata; private Resources resources; private static Log log = LogFactory.getLog(ViewRepository.class); public ViewRepository(Metadata metadata, Resources resources) { this.metadata = metadata; this.resources = resources; } /** * Get View for an entity. * @param entityClass entity class * @param name view name * @return view instance. Throws {@link ViewNotFoundException} if not found. */ public View getView(Class<? 
extends Entity> entityClass, String name) { return getView(metadata.getSession().getClassNN(entityClass), name); } /** * Get View for an entity. * @param metaClass entity class * @param name view name * @return view instance. Throws {@link ViewNotFoundException} if not found. */ public View getView(MetaClass metaClass, String name) { Objects.requireNonNull(metaClass, "MetaClass is null"); View view = findView(metaClass, name); if (view == null) throw new ViewNotFoundException(String.format("View %s/%s not found", metaClass.getName(), name)); return view; } /** * Searches for a View for an entity * @param metaClass entity class * @param name view name * @return view instance or null if no view found */ @Nullable public View findView(MetaClass metaClass, String name) { if (metaClass == null || name == null) return null; // Replace with extended entity if such one exists metaClass = metadata.getExtendedEntities().getEffectiveMetaClass(metaClass); View view = retrieveView(metaClass, name); if (view == null) { MetaClass originalMetaClass = metadata.getExtendedEntities().getOriginalMetaClass(metaClass); if (originalMetaClass != null) { view = retrieveView(originalMetaClass, name); } } return view; } private View deployDefaultView(MetaClass metaClass, String name) { Class<? 
extends BaseEntity> javaClass = metaClass.getJavaClass(); View view = new View(javaClass, name, false); if (View.LOCAL.equals(name)) { for (MetaProperty property : metaClass.getProperties()) { if (!property.getRange().isClass() && !metadata.getTools().isSystem(property)) { view.addProperty(property.getName()); } } } else if (View.MINIMAL.equals(name)) { NamePattern annotation = javaClass.getAnnotation(NamePattern.class); if (annotation != null) { String pattern = annotation.value(); int pos = pattern.indexOf("|"); if (pos >= 0) { String fieldsStr = StringUtils.substring(pattern, pos + 1); String[] fields = fieldsStr.split("[,;]"); for (String field : fields) { view.addProperty(field); } } } } else throw new UnsupportedOperationException("Unsupported default view: " + name); storeView(metaClass, view); return view; } public void deployViews(String resourceUrl) { if (!readFileNames.contains(resourceUrl)) { log.debug("Deploying views config: " + resourceUrl); InputStream stream = null; try { stream = resources.getResourceAsStream(resourceUrl); if (stream == null) throw new IllegalStateException("Resource is not found: " + resourceUrl); deployViews(stream); readFileNames.add(resourceUrl); } finally { IOUtils.closeQuietly(stream); } } } public void deployViews(InputStream xml) { deployViews(new InputStreamReader(xml)); } public void deployViews(Reader xml) { SAXReader reader = new SAXReader(); Document doc; try { doc = reader.read(xml); } catch (DocumentException e) { throw new RuntimeException(e); } Element rootElem = doc.getRootElement(); for (Element includeElem : (List<Element>) rootElem.elements("include")) { String file = includeElem.attributeValue("file"); if (!StringUtils.isBlank(file)) deployViews(file); } for (Element viewElem : (List<Element>) rootElem.elements("view")) { deployView(rootElem, viewElem); } } protected View retrieveView(MetaClass metaClass, String name) { Map<String, View> views = storage.get(metaClass); View view = (views == null ? 
null : views.get(name)); if (view == null && (name.equals(View.LOCAL) || name.equals(View.MINIMAL))) { view = deployDefaultView(metaClass, name); } return view; } public View deployView(Element rootElem, Element viewElem) { String viewName = viewElem.attributeValue("name"); if (StringUtils.isBlank(viewName)) throw new IllegalStateException("Invalid view definition: no 'name' attribute"); MetaClass metaClass; String entity = viewElem.attributeValue("entity"); if (StringUtils.isBlank(entity)) { String className = viewElem.attributeValue("class"); if (StringUtils.isBlank(className)) throw new IllegalStateException("Invalid view definition: no 'entity' or 'class' attribute"); Class entityClass = ReflectionHelper.getClass(className); metaClass = metadata.getSession().getClassNN(entityClass); } else { metaClass = metadata.getSession().getClassNN(entity); } View v = retrieveView(metaClass, viewName); boolean overwrite = BooleanUtils.toBoolean(viewElem.attributeValue("overwrite")); if (v != null && !overwrite) return v; String systemProperties = viewElem.attributeValue("systemProperties"); View view; String ancestor = viewElem.attributeValue("extends"); if (ancestor != null) { View ancestorView = getAncestorView(metaClass, ancestor); boolean includeSystemProperties = systemProperties == null ? 
ancestorView.isIncludeSystemProperties() : Boolean.valueOf(systemProperties); view = new View(ancestorView, metaClass.getJavaClass(), viewName, includeSystemProperties); } else { view = new View(metaClass.getJavaClass(), viewName, Boolean.valueOf(systemProperties)); } loadView(rootElem, viewElem, view); storeView(metaClass, view); return view; } private View getAncestorView(MetaClass metaClass, String ancestor) { View ancestorView = retrieveView(metaClass, ancestor); if (ancestorView == null) { MetaClass originalMetaClass = metadata.getExtendedEntities().getOriginalMetaClass(metaClass); if (originalMetaClass != null) ancestorView = retrieveView(originalMetaClass, ancestor); if (ancestorView == null) throw new IllegalStateException("No ancestor view found: " + ancestor); } return ancestorView; } protected void loadView(Element rootElem, Element viewElem, View view) { final MetaClass metaClass = metadata.getSession().getClassNN(view.getEntityClass()); final String viewName = view.getName(); for (Element propElem : (List<Element>) viewElem.elements("property")) { String propertyName = propElem.attributeValue("name"); MetaProperty metaProperty = metaClass.getProperty(propertyName); if (metaProperty == null) throw new IllegalStateException( String.format("View %s/%s definition error: property %s doesn't exists", metaClass.getName(), viewName, propertyName) ); View refView = null; String refViewName = propElem.attributeValue("view"); MetaClass refMetaClass; Range range = metaProperty.getRange(); if (range == null) { throw new RuntimeException("cannot find range for meta property: " + metaProperty); } final List<Element> propertyElements = propElem.elements("property"); boolean inlineView = !propertyElements.isEmpty(); if (refViewName != null && !inlineView) { if (!range.isClass()) throw new IllegalStateException( String.format("View %s/%s definition error: property %s is not an entity", metaClass.getName(), viewName, propertyName) ); refMetaClass = getMetaClass(propElem, 
range); refView = retrieveView(refMetaClass, refViewName); if (refView == null) { for (Element e : (List<Element>) rootElem.elements("view")) { if ((refMetaClass.getName().equals(e.attributeValue("entity")) || refMetaClass.getJavaClass().getName().equals(e.attributeValue("class"))) && refViewName.equals(e.attributeValue("name"))) { refView = deployView(rootElem, e); break; } } if (refView == null) throw new IllegalStateException( String.format( "View %s/%s definition error: unable to find/deploy referenced view %s/%s", metaClass.getName(), viewName, range.asClass().getName(), refViewName) ); } } if (range.isClass() && refView == null && inlineView) { // try to import anonymous views String ancestorViewName = propElem.attributeValue("view"); if (ancestorViewName == null) { refView = new View(range.asClass().getJavaClass()); } else { refMetaClass = getMetaClass(propElem, range); View ancestorView = getAncestorView(refMetaClass, ancestorViewName); refView = new View(ancestorView, range.asClass().getJavaClass(), refViewName, true); } loadView(rootElem, propElem, refView); } boolean lazy = Boolean.valueOf(propElem.attributeValue("lazy")); view.addProperty(propertyName, refView, lazy); } } private MetaClass getMetaClass(Element propElem, Range range) { MetaClass refMetaClass; String refEntityName = propElem.attributeValue("entity"); // this attribute is deprecated if (refEntityName == null) { refMetaClass = range.asClass(); } else { refMetaClass = metadata.getSession().getClass(refEntityName); } return refMetaClass; } public void storeView(MetaClass metaClass, View view) { Map<String, View> views = storage.get(metaClass); if (views == null) { views = new ConcurrentHashMap<String, View>(); } views.put(view.getName(), view); storage.put(metaClass, views); } public List<View> getAll() { List<View> list = new ArrayList<View>(); for (Map<String, View> viewMap : storage.values()) { list.addAll(viewMap.values()); } return list; } }
modules/global/src/com/haulmont/cuba/core/global/ViewRepository.java
/* * Copyright (c) 2012 Haulmont Technology Ltd. All Rights Reserved. * Haulmont Technology proprietary and confidential. * Use is subject to license terms. */ package com.haulmont.cuba.core.global; import com.haulmont.bali.util.ReflectionHelper; import com.haulmont.chile.core.annotations.NamePattern; import com.haulmont.chile.core.model.MetaClass; import com.haulmont.chile.core.model.MetaProperty; import com.haulmont.chile.core.model.Range; import com.haulmont.cuba.core.entity.BaseEntity; import com.haulmont.cuba.core.entity.Entity; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.BooleanUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.dom4j.Document; import org.dom4j.DocumentException; import org.dom4j.Element; import org.dom4j.io.SAXReader; import javax.annotation.Nullable; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.util.*; import java.util.concurrent.ConcurrentHashMap; /** * Class containing all views defined in XML and deployed at runtime.<br> * The reference to the ViewRepository can be obtained through {@link com.haulmont.cuba.core.global.MetadataProvider} * * @author krivopustov * @version $Id$ */ public class ViewRepository { private List<String> readFileNames = new LinkedList<String>(); private Map<MetaClass, Map<String, View>> storage = new ConcurrentHashMap<MetaClass, Map<String, View>>(); private Metadata metadata; private Resources resources; private static Log log = LogFactory.getLog(ViewRepository.class); public ViewRepository(Metadata metadata, Resources resources) { this.metadata = metadata; this.resources = resources; } /** * Get View for an entity. * @param entityClass entity class * @param name view name * @return view instance. Throws {@link ViewNotFoundException} if not found. */ public View getView(Class<? 
extends Entity> entityClass, String name) { return getView(metadata.getSession().getClassNN(entityClass), name); } /** * Get View for an entity. * @param metaClass entity class * @param name view name * @return view instance. Throws {@link ViewNotFoundException} if not found. */ public View getView(MetaClass metaClass, String name) { Objects.requireNonNull(metaClass, "MetaClass is null"); View view = findView(metaClass, name); if (view == null) throw new ViewNotFoundException(String.format("View %s/%s not found", metaClass.getName(), name)); return view; } /** * Searches for a View for an entity * @param metaClass entity class * @param name view name * @return view instance or null if no view found */ @Nullable public View findView(MetaClass metaClass, String name) { if (metaClass == null || name == null) return null; // Replace with extended entity if such one exists metaClass = metadata.getExtendedEntities().getEffectiveMetaClass(metaClass); View view = retrieveView(metaClass, name); if (view == null) { MetaClass originalMetaClass = metadata.getExtendedEntities().getOriginalMetaClass(metaClass); if (originalMetaClass != null) { view = retrieveView(originalMetaClass, name); } } return view; } private View deployDefaultView(MetaClass metaClass, String name) { Class<? 
extends BaseEntity> javaClass = metaClass.getJavaClass(); View view = new View(javaClass, name, false); if (View.LOCAL.equals(name)) { for (MetaProperty property : metaClass.getProperties()) { if (!property.getRange().isClass() && !metadata.getTools().isSystem(property)) { view.addProperty(property.getName()); } } } else if (View.MINIMAL.equals(name)) { NamePattern annotation = javaClass.getAnnotation(NamePattern.class); if (annotation != null) { String pattern = annotation.value(); int pos = pattern.indexOf("|"); if (pos >= 0) { String fieldsStr = StringUtils.substring(pattern, pos + 1); String[] fields = fieldsStr.split("[,;]"); for (String field : fields) { view.addProperty(field); } } } } else throw new UnsupportedOperationException("Unsupported default view: " + name); storeView(metaClass, view); return view; } public void deployViews(String resourceUrl) { if (!readFileNames.contains(resourceUrl)) { log.debug("Deploying views config: " + resourceUrl); InputStream stream = null; try { stream = resources.getResourceAsStream(resourceUrl); if (stream == null) throw new IllegalStateException("Resource is not found: " + resourceUrl); deployViews(stream); readFileNames.add(resourceUrl); } finally { IOUtils.closeQuietly(stream); } } } public void deployViews(InputStream xml) { deployViews(new InputStreamReader(xml)); } public void deployViews(Reader xml) { SAXReader reader = new SAXReader(); Document doc; try { doc = reader.read(xml); } catch (DocumentException e) { throw new RuntimeException(e); } Element rootElem = doc.getRootElement(); for (Element includeElem : (List<Element>) rootElem.elements("include")) { String file = includeElem.attributeValue("file"); if (!StringUtils.isBlank(file)) deployViews(file); } for (Element viewElem : (List<Element>) rootElem.elements("view")) { deployView(rootElem, viewElem); } } protected View retrieveView(MetaClass metaClass, String name) { Map<String, View> views = storage.get(metaClass); View view = (views == null ? 
null : views.get(name)); if (view == null && (name.equals(View.LOCAL) || name.equals(View.MINIMAL))) { view = deployDefaultView(metaClass, name); } return view; } public View deployView(Element rootElem, Element viewElem) { String viewName = viewElem.attributeValue("name"); if (StringUtils.isBlank(viewName)) throw new IllegalStateException("Invalid view definition: no 'name' attribute"); MetaClass metaClass; String entity = viewElem.attributeValue("entity"); if (StringUtils.isBlank(entity)) { String className = viewElem.attributeValue("class"); if (StringUtils.isBlank(className)) throw new IllegalStateException("Invalid view definition: no 'entity' or 'class' attribute"); Class entityClass = ReflectionHelper.getClass(className); metaClass = metadata.getSession().getClassNN(entityClass); } else { metaClass = metadata.getSession().getClassNN(entity); } View v = retrieveView(metaClass, viewName); boolean overwrite = BooleanUtils.toBoolean(viewElem.attributeValue("overwrite")); if (v != null && !overwrite) return v; String systemProperties = viewElem.attributeValue("systemProperties"); View view; String ancestor = viewElem.attributeValue("extends"); if (ancestor != null) { View ancestorView = getAncestorView(metaClass, ancestor); boolean includeSystemProperties = systemProperties == null ? 
ancestorView.isIncludeSystemProperties() : Boolean.valueOf(systemProperties); view = new View(ancestorView, metaClass.getJavaClass(), viewName, includeSystemProperties); } else { view = new View(metaClass.getJavaClass(), viewName, Boolean.valueOf(systemProperties)); } loadView(rootElem, viewElem, view); storeView(metaClass, view); return view; } private View getAncestorView(MetaClass metaClass, String ancestor) { View ancestorView = retrieveView(metaClass, ancestor); if (ancestorView == null) { MetaClass originalMetaClass = metadata.getExtendedEntities().getOriginalMetaClass(metaClass); if (originalMetaClass != null) ancestorView = retrieveView(originalMetaClass, ancestor); if (ancestorView == null) throw new IllegalStateException("No ancestor view found: " + ancestor); } return ancestorView; } protected void loadView(Element rootElem, Element viewElem, View view) { final MetaClass metaClass = metadata.getSession().getClassNN(view.getEntityClass()); final String viewName = view.getName(); for (Element propElem : (List<Element>) viewElem.elements("property")) { String propertyName = propElem.attributeValue("name"); MetaProperty metaProperty = metaClass.getProperty(propertyName); if (metaProperty == null) throw new IllegalStateException( String.format("View %s/%s definition error: property %s doesn't exists", metaClass.getName(), viewName, propertyName) ); View refView = null; String refViewName = propElem.attributeValue("view"); MetaClass refMetaClass; Range range = metaProperty.getRange(); if (range == null) { throw new RuntimeException("cannot find range for meta property: " + metaProperty); } final List<Element> propertyElements = propElem.elements("property"); boolean inlineView = !propertyElements.isEmpty(); if (refViewName != null && !inlineView) { if (!range.isClass()) throw new IllegalStateException( String.format("View %s/%s definition error: property %s is not an entity", metaClass.getName(), viewName, propertyName) ); refMetaClass = getMetaClass(propElem, 
range); refView = retrieveView(refMetaClass, refViewName); if (refView == null) { for (Element e : (List<Element>) rootElem.elements("view")) { if ((refMetaClass.getName().equals(e.attributeValue("entity")) || refMetaClass.getJavaClass().getName().equals(e.attributeValue("class"))) && refViewName.equals(e.attributeValue("name"))) { refView = deployView(rootElem, e); break; } } if (refView == null) throw new IllegalStateException( String.format( "View %s/%s definition error: unable to find/deploy referenced view %s/%s", metaClass.getName(), viewName, range.asClass().getName(), refViewName) ); } } if (range.isClass() && refView == null && inlineView) { // try to import anonymous views String ancestorViewName = propElem.attributeValue("view"); if (ancestorViewName == null) { refView = new View(range.asClass().getJavaClass()); } else { refMetaClass = getMetaClass(propElem, range); View ancestorView = getAncestorView(refMetaClass, ancestorViewName); refView = new View(ancestorView, range.asClass().getJavaClass(), refViewName, true); } loadView(rootElem, propElem, refView); } boolean lazy = Boolean.valueOf(propElem.attributeValue("lazy")); view.addProperty(propertyName, refView, lazy); } } private MetaClass getMetaClass(Element propElem, Range range) { MetaClass refMetaClass; String refEntityName = propElem.attributeValue("entity"); // this attribute is deprecated if (refEntityName == null) { refMetaClass = range.asClass(); } else { refMetaClass = metadata.getSession().getClass(refEntityName); } return refMetaClass; } public void storeView(MetaClass metaClass, View view) { Map<String, View> views = storage.get(metaClass); if (views == null) { views = new ConcurrentHashMap<String, View>(); } views.put(view.getName(), view); storage.put(metaClass, views); } public List<View> getAll() { List<View> list = new ArrayList<View>(); for (Map<String, View> viewMap : storage.values()) { list.addAll(viewMap.values()); } return list; } }
Refs #1346 Better error reporting and JavaDocs
modules/global/src/com/haulmont/cuba/core/global/ViewRepository.java
Refs #1346 Better error reporting and JavaDocs
<ide><path>odules/global/src/com/haulmont/cuba/core/global/ViewRepository.java <ide> <ide> /** <ide> * Class containing all views defined in XML and deployed at runtime.<br> <del> * The reference to the ViewRepository can be obtained through {@link com.haulmont.cuba.core.global.MetadataProvider} <add> * The reference to the ViewRepository can be obtained through {@link com.haulmont.cuba.core.global.Metadata}. <ide> * <ide> * @author krivopustov <ide> * @version $Id$
Java
apache-2.0
87bd00c0b875d05a59f1826f241e5b7fe6ebac14
0
fitermay/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,kool79/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,diorcety/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,asedunov/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,xfournet/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,FHannes/intellij-community,amith01994/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,orekyuu/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,caot/intellij-community,blademainer/intellij-community,samthor/intellij-community,fnouama/intellij-community,asedunov/intellij-community,vladmm/intellij-community,alphafoobar/intellij-community,signed/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij
-community,SerCeMan/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,youdonghai/intellij-community,allotria/intellij-community,suncycheng/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,semonte/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,izonder/intellij-community,Lekanich/intellij-community,da1z/intellij-community,caot/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,signed/intellij-community,adedayo/intellij-community,asedunov/intellij-community,semonte/intellij-community,kdwink/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,slisson/intellij-community,nicolargo/intellij-community,da1z/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,supersven/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,izonder/intellij-community,semont
e/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,samthor/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,signed/intellij-community,semonte/intellij-community,holmes/intellij-community,da1z/intellij-community,consulo/consulo,petteyg/intellij-community,jagguli/intellij-community,consulo/consulo,Distrotech/intellij-community,holmes/intellij-community,da1z/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,semonte/intellij-community,xfournet/intellij-community,ibinti/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,caot/intellij-community,jagguli/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,asedunov/intellij-community,izonder/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,petteyg/intellij-community,fnouama/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,clumsy/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,asedunov/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,fitermay/intellij-community,kool79/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,dslomov/intellij
-community,fnouama/intellij-community,ernestp/consulo,fengbaicanhe/intellij-community,ibinti/intellij-community,izonder/intellij-community,holmes/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,ryano144/intellij-community,asedunov/intellij-community,FHannes/intellij-community,ryano144/intellij-community,amith01994/intellij-community,da1z/intellij-community,dslomov/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,supersven/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,petteyg/intellij-community,caot/intellij-community,holmes/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,petteyg/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,robovm/robovm-studio,kool79/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,orekyuu/intellij-community,S
erCeMan/intellij-community,robovm/robovm-studio,caot/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,diorcety/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,samthor/intellij-community,gnuhub/intellij-community,kool79/intellij-community,hurricup/intellij-community,vladmm/intellij-community,fnouama/intellij-community,fnouama/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,samthor/intellij-community,blademainer/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,Lekanich/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,ernestp/consulo,vvv1559/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,alphafoobar/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,robovm/robovm-studio,blademainer/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,allotria/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,ernestp/consulo,caot/intellij-communi
ty,jagguli/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,signed/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,supersven/intellij-community,FHannes/intellij-community,hurricup/intellij-community,kool79/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,kool79/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,asedunov/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,signed/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,ernestp/consulo,izonder/intellij-community,kdwink/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,da1z/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,diorcety/intellij-community,supersven/intellij-community,robovm/robovm-studio,signed/intellij-community,asedunov/intellij-community,adedayo/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,signed/in
tellij-community,salguarnieri/intellij-community,signed/intellij-community,jagguli/intellij-community,semonte/intellij-community,kdwink/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,clumsy/intellij-community,dslomov/intellij-community,amith01994/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,dslomov/intellij-community,fnouama/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,blademainer/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,caot/intellij-community,robovm/robovm-studio,diorcety/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,orekyuu/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,asedunov/intellij-community,supersven/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,mg
lukhikh/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,allotria/intellij-community,ibinti/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,retomerz/intellij-community,fitermay/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,apixandru/intellij-community,hurricup/intellij-community,clumsy/intellij-community,retomerz/intellij-community,ibinti/intellij-community,izonder/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,caot/intellij-community,kool79/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,signed/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,consulo/consulo,slisson/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,ernestp/consulo,vladmm/intellij-community,supersven/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,petteyg/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,hurricup/intellij-community,blademainer/intellij-community,ibinti/intellij-community,dslomov/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,apixandru/
intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,robovm/robovm-studio,izonder/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,consulo/consulo,idea4bsd/idea4bsd,ryano144/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,blademainer/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,slisson/intellij-community,apixandru/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,tmpgit/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,holmes/intellij-community,dslomov/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,retomerz/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,FHannes/intellij-community,clumsy/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,dslomov/intellij-community,jagguli/intellij-community,blademainer/
intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,supersven/intellij-community,semonte/intellij-community,jagguli/intellij-community,caot/intellij-community,consulo/consulo,kdwink/intellij-community,gnuhub/intellij-community,allotria/intellij-community,wreckJ/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,hurricup/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,asedunov/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,fitermay/intellij-community,petteyg/intellij-community,signed/intellij-community,apixandru/intellij-community,FHannes/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,signed/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,caot/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community,vvv1559/intelli
j-community,caot/intellij-community,ernestp/consulo,allotria/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,slisson/intellij-community,samthor/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,akosyakov/intellij-community,holmes/intellij-community,da1z/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,semonte/intellij-community,caot/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,allotria/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,samthor/intellij-community,youdonghai/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,da1z/intellij-community,semonte/intellij-community,consulo/consulo,hurricup/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,samthor/intellij-community,allotria/intellij-community
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.codeInsight; import com.intellij.codeHighlighting.Pass; import com.intellij.codeInsight.daemon.DaemonBundle; import com.intellij.codeInsight.daemon.DaemonCodeAnalyzerSettings; import com.intellij.codeInsight.daemon.LineMarkerInfo; import com.intellij.codeInsight.daemon.impl.*; import com.intellij.ide.util.MethodCellRenderer; import com.intellij.lang.ASTNode; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.colors.CodeInsightColors; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.markup.GutterIconRenderer; import com.intellij.openapi.editor.markup.SeparatorPlacement; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.IndexNotReadyException; import com.intellij.openapi.util.Pair; import com.intellij.psi.*; import com.intellij.psi.presentation.java.ClassPresentationUtil; import com.intellij.psi.search.PsiElementProcessor; import com.intellij.psi.search.PsiElementProcessorAdapter; import com.intellij.psi.search.searches.AllOverridingMethodsSearch; import com.intellij.psi.search.searches.OverridingMethodsSearch; import com.intellij.psi.search.searches.SuperMethodsSearch; import 
com.intellij.psi.util.MethodSignatureBackedByPsiMethod; import com.intellij.psi.util.PsiUtil; import com.intellij.util.CommonProcessors; import com.intellij.util.Function; import com.intellij.util.FunctionUtil; import com.intellij.util.Processor; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.HashSet; import gnu.trove.THashSet; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocComment; import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocCommentOwner; import org.jetbrains.plugins.groovy.lang.lexer.TokenSets; import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField; import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable; import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrAccessorMethod; import org.jetbrains.plugins.groovy.lang.psi.impl.statements.GrVariableDeclarationBase; import org.jetbrains.plugins.groovy.lang.psi.util.GroovyPropertyUtils; import javax.swing.*; import java.awt.event.MouseEvent; import java.text.MessageFormat; import java.util.*; /** * @author ilyas * Same logic as for Java LMP */ public class GroovyLineMarkerProvider extends JavaLineMarkerProvider { private static final Logger LOG = Logger.getInstance(GroovyLineMarkerProvider.class); private static final MarkerType OVERRIDING_PROPERTY_TYPE = new MarkerType(new Function<PsiElement, String>() { @Nullable @Override public String fun(PsiElement psiElement) { final PsiElement parent = psiElement.getParent(); if (!(parent instanceof GrField)) return null; final GrField field = (GrField)parent; final List<GrAccessorMethod> accessors = GroovyPropertyUtils.getFieldAccessors(field); StringBuilder builder = new StringBuilder(); builder.append("<html><body>"); int count = 0; String sep 
= ""; for (GrAccessorMethod method : accessors) { PsiMethod[] superMethods = method.findSuperMethods(false); count += superMethods.length; if (superMethods.length == 0) continue; PsiMethod superMethod = superMethods[0]; boolean isAbstract = method.hasModifierProperty(PsiModifier.ABSTRACT); boolean isSuperAbstract = superMethod.hasModifierProperty(PsiModifier.ABSTRACT); @NonNls final String key; if (isSuperAbstract && !isAbstract) { key = "method.implements.in"; } else { key = "method.overrides.in"; } builder.append(sep); sep = "<br>"; composeText(superMethods, DaemonBundle.message(key), builder); } if (count == 0) return null; builder.append("</html></body>"); return builder.toString(); } }, new LineMarkerNavigator() { public void browse(MouseEvent e, PsiElement element) { PsiElement parent = element.getParent(); if (!(parent instanceof GrField)) return; final GrField field = (GrField)parent; final List<GrAccessorMethod> accessors = GroovyPropertyUtils.getFieldAccessors(field); final ArrayList<PsiMethod> superMethods = new ArrayList<PsiMethod>(); for (GrAccessorMethod method : accessors) { Collections.addAll(superMethods, method.findSuperMethods(false)); } if (superMethods.size() == 0) return; final PsiMethod[] supers = ContainerUtil.toArray(superMethods, new PsiMethod[superMethods.size()]); boolean showMethodNames = !PsiUtil.allMethodsHaveSameSignature(supers); PsiElementListNavigator.openTargets(e, supers, DaemonBundle.message("navigation.title.super.method", field.getName()), new MethodCellRenderer(showMethodNames)); } }); private static final MarkerType OVERRIDEN_PROPERTY_TYPE = new MarkerType(new Function<PsiElement, String>() { @Nullable @Override public String fun(PsiElement element) { PsiElement parent = element.getParent(); if (!(parent instanceof GrField)) return null; final List<GrAccessorMethod> accessors = GroovyPropertyUtils.getFieldAccessors((GrField)parent); PsiElementProcessor.CollectElementsWithLimit<PsiMethod> processor = new 
PsiElementProcessor.CollectElementsWithLimit<PsiMethod>(5); for (GrAccessorMethod method : accessors) { OverridingMethodsSearch.search(method, method.getUseScope(), true).forEach(new PsiElementProcessorAdapter<PsiMethod>(processor)); } if (processor.isOverflow()) { return DaemonBundle.message("method.is.overridden.too.many"); } PsiMethod[] overridings = processor.toArray(new PsiMethod[processor.getCollection().size()]); if (overridings.length == 0) return null; Comparator<PsiMethod> comparator = new MethodCellRenderer(false).getComparator(); Arrays.sort(overridings, comparator); String start = DaemonBundle.message("method.is.overriden.header"); @NonNls String pattern = "&nbsp;&nbsp;&nbsp;&nbsp;{1}"; return GutterIconTooltipHelper.composeText(overridings, start, pattern); } }, new LineMarkerNavigator() { @Override public void browse(MouseEvent e, PsiElement element) { PsiElement parent = element.getParent(); if (!(parent instanceof GrField)) return; if (DumbService.isDumb(element.getProject())) { DumbService.getInstance(element.getProject()).showDumbModeNotification("Navigation to overriding classes is not possible during index update"); return; } final GrField field = (GrField)parent; final CommonProcessors.CollectProcessor<PsiMethod> collectProcessor = new CommonProcessors.CollectProcessor<PsiMethod>(new THashSet<PsiMethod>()); if (!ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() { public void run() { for (GrAccessorMethod method : GroovyPropertyUtils.getFieldAccessors(field)) { OverridingMethodsSearch.search(method, method.getUseScope(), true).forEach(collectProcessor); } } }, "Searching for overriding methods", true, field.getProject(), (JComponent)e.getComponent())) { return; } PsiMethod[] overridings = collectProcessor.toArray(PsiMethod.EMPTY_ARRAY); if (overridings.length == 0) return; String title = DaemonBundle.message("navigation.title.overrider.method", field.getName(), overridings.length); boolean showMethodNames = 
!PsiUtil.allMethodsHaveSameSignature(overridings); MethodCellRenderer renderer = new MethodCellRenderer(showMethodNames); Arrays.sort(overridings, renderer.getComparator()); PsiElementListNavigator.openTargets(e, overridings, title, renderer); } } ); private static StringBuilder composeText(@NotNull PsiElement[] elements, final String pattern, StringBuilder result) { Set<String> names = new LinkedHashSet<String>(); for (PsiElement element : elements) { String methodName = ((PsiMethod)element).getName(); PsiClass aClass = ((PsiMethod)element).getContainingClass(); String className = aClass == null ? "" : ClassPresentationUtil.getNameForClass(aClass, true); names.add(MessageFormat.format(pattern, methodName, className)); } @NonNls String sep = ""; for (String name : names) { result.append(sep); sep = "<br>"; result.append(name); } return result; } public GroovyLineMarkerProvider(DaemonCodeAnalyzerSettings daemonSettings, EditorColorsManager colorsManager) { super(daemonSettings, colorsManager); } @Override public LineMarkerInfo getLineMarkerInfo(final PsiElement element) { final PsiElement parent = element.getParent(); if (parent instanceof PsiNameIdentifierOwner) { if (parent instanceof GrField) { for (GrAccessorMethod method : GroovyPropertyUtils.getFieldAccessors((GrField)parent)) { MethodSignatureBackedByPsiMethod superSignature = null; try { superSignature = SuperMethodsSearch.search(method, null, true, false).findFirst(); } catch (IndexNotReadyException e) { //some searchers (EJB) require indices. What shall we do? } if (superSignature != null) { boolean overrides = method.hasModifierProperty(PsiModifier.ABSTRACT) == superSignature.getMethod().hasModifierProperty(PsiModifier.ABSTRACT); final Icon icon = overrides ? 
OVERRIDING_METHOD_ICON : IMPLEMENTING_METHOD_ICON; final MarkerType type = OVERRIDING_PROPERTY_TYPE; return new LineMarkerInfo<PsiElement>(element, element.getTextRange(), icon, Pass.UPDATE_ALL, type.getTooltip(), type.getNavigationHandler(), GutterIconRenderer.Alignment.LEFT); } } } final ASTNode node = element.getNode(); if (node != null && TokenSets.PROPERTY_NAMES.contains(node.getElementType())) { return super.getLineMarkerInfo(((PsiNameIdentifierOwner)parent).getNameIdentifier()); } } //need to draw method separator above docComment if (myDaemonSettings.SHOW_METHOD_SEPARATORS && element.getFirstChild() == null) { PsiElement element1 = element; boolean isMember = false; while (element1 != null && !(element1 instanceof PsiFile) && element1.getPrevSibling() == null) { element1 = element1.getParent(); if (element1 instanceof PsiMember || element1 instanceof GrVariableDeclarationBase) { isMember = true; break; } } if (isMember && !(element1 instanceof PsiAnonymousClass || element1.getParent() instanceof PsiAnonymousClass)) { boolean drawSeparator = false; int category = getGroovyCategory(element1); for (PsiElement child = element1.getPrevSibling(); child != null; child = child.getPrevSibling()) { int category1 = getGroovyCategory(child); if (category1 == 0) continue; drawSeparator = category != 1 || category1 != 1; break; } if (drawSeparator) { GrDocComment comment = null; if (element1 instanceof GrDocCommentOwner) { comment = ((GrDocCommentOwner)element1).getDocComment(); } LineMarkerInfo info = new LineMarkerInfo<PsiElement>(element, comment != null ? 
comment.getTextRange() : element.getTextRange(), null, Pass.UPDATE_ALL, FunctionUtil.<Object, String>nullConstant(), null, GutterIconRenderer.Alignment.RIGHT); EditorColorsScheme scheme = myColorsManager.getGlobalScheme(); info.separatorColor = scheme.getColor(CodeInsightColors.METHOD_SEPARATORS_COLOR); info.separatorPlacement = SeparatorPlacement.TOP; return info; } } } return super.getLineMarkerInfo(element); } private static int getGroovyCategory(PsiElement element) { if (element instanceof GrVariableDeclarationBase) { GrVariable[] variables = ((GrVariableDeclarationBase)element).getVariables(); if (variables.length == 1 && variables[0] instanceof GrField && variables[0].getInitializerGroovy() instanceof GrClosableBlock) { return 2; } } return JavaLineMarkerProvider.getCategory(element); } @Override public void collectSlowLineMarkers(final List<PsiElement> elements, final Collection<LineMarkerInfo> result) { List<GrField> fields = new ArrayList<GrField>(); for (PsiElement element : elements) { if (!(element instanceof GrField)) continue; fields.add((GrField)element); } collectOverridingMethods(fields, result); super.collectSlowLineMarkers(elements, result); } private static void collectOverridingMethods(final List<GrField> fields, Collection<LineMarkerInfo> result) { final Set<GrField> overridden = new HashSet<GrField>(); final HashSet<GrAccessorMethod> accessors = new HashSet<GrAccessorMethod>(); Set<PsiClass> classes = new THashSet<PsiClass>(); for (GrField field : fields) { ProgressManager.checkCanceled(); final PsiClass parentClass = field.getContainingClass(); if (!"java.lang.Object".equals(parentClass.getQualifiedName())) { classes.add(parentClass); } accessors.addAll(GroovyPropertyUtils.getFieldAccessors(field)); } for (final PsiClass aClass : classes) { try { AllOverridingMethodsSearch.search(aClass).forEach(new Processor<Pair<PsiMethod, PsiMethod>>() { public boolean process(final Pair<PsiMethod, PsiMethod> pair) { ProgressManager.checkCanceled(); final 
PsiMethod superMethod = pair.getFirst(); if (isCorrectTarget(superMethod) && isCorrectTarget(pair.getSecond())) { if (accessors.remove(superMethod)) { LOG.assertTrue(superMethod instanceof GrAccessorMethod); overridden.add(((GrAccessorMethod)superMethod).getProperty()); } } return !fields.isEmpty(); } }); } catch (IndexNotReadyException ignored) { } } for (GrField field : overridden) { final Icon icon = OVERRIDEN_METHOD_MARKER_RENDERER; PsiElement range; range = field.getNameIdentifierGroovy(); final MarkerType type = OVERRIDEN_PROPERTY_TYPE; LineMarkerInfo info = new LineMarkerInfo<PsiElement>(range, range.getTextRange(), icon, Pass.UPDATE_OVERRIDEN_MARKERS, type.getTooltip(), type.getNavigationHandler(), GutterIconRenderer.Alignment.RIGHT); result.add(info); } } private static boolean isCorrectTarget(PsiMethod superMethod) { final PsiElement navigationElement = superMethod.getNavigationElement(); return superMethod.isPhysical() || navigationElement.isPhysical() && !(navigationElement instanceof PsiClass); } }
plugins/groovy/src/org/jetbrains/plugins/groovy/codeInsight/GroovyLineMarkerProvider.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.codeInsight; import com.intellij.codeHighlighting.Pass; import com.intellij.codeInsight.daemon.DaemonBundle; import com.intellij.codeInsight.daemon.DaemonCodeAnalyzerSettings; import com.intellij.codeInsight.daemon.LineMarkerInfo; import com.intellij.codeInsight.daemon.impl.*; import com.intellij.ide.util.MethodCellRenderer; import com.intellij.lang.ASTNode; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.colors.CodeInsightColors; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.markup.GutterIconRenderer; import com.intellij.openapi.editor.markup.SeparatorPlacement; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.IndexNotReadyException; import com.intellij.openapi.util.Pair; import com.intellij.psi.*; import com.intellij.psi.presentation.java.ClassPresentationUtil; import com.intellij.psi.search.PsiElementProcessor; import com.intellij.psi.search.PsiElementProcessorAdapter; import com.intellij.psi.search.searches.AllOverridingMethodsSearch; import com.intellij.psi.search.searches.OverridingMethodsSearch; import com.intellij.psi.search.searches.SuperMethodsSearch; import 
com.intellij.psi.util.MethodSignatureBackedByPsiMethod; import com.intellij.psi.util.PsiUtil; import com.intellij.util.CommonProcessors; import com.intellij.util.Function; import com.intellij.util.FunctionUtil; import com.intellij.util.Processor; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.HashSet; import gnu.trove.THashSet; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocComment; import org.jetbrains.plugins.groovy.lang.groovydoc.psi.api.GrDocCommentOwner; import org.jetbrains.plugins.groovy.lang.lexer.TokenSets; import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField; import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable; import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrAccessorMethod; import org.jetbrains.plugins.groovy.lang.psi.impl.statements.GrVariableDeclarationBase; import org.jetbrains.plugins.groovy.lang.psi.util.GroovyPropertyUtils; import javax.swing.*; import java.awt.event.MouseEvent; import java.text.MessageFormat; import java.util.*; /** * @author ilyas * Same logic as for Java LMP */ public class GroovyLineMarkerProvider extends JavaLineMarkerProvider { private static final Logger LOG = Logger.getInstance(GroovyLineMarkerProvider.class); private static final MarkerType OVERRIDING_PROPERTY_TYPE = new MarkerType(new Function<PsiElement, String>() { @Nullable @Override public String fun(PsiElement psiElement) { final PsiElement parent = psiElement.getParent(); if (!(parent instanceof GrField)) return null; final GrField field = (GrField)parent; final List<GrAccessorMethod> accessors = GroovyPropertyUtils.getFieldAccessors(field); StringBuilder builder = new StringBuilder(); builder.append("<html><body>"); int count = 0; String sep 
= ""; for (GrAccessorMethod method : accessors) { PsiMethod[] superMethods = method.findSuperMethods(false); count += superMethods.length; if (superMethods.length == 0) continue; PsiMethod superMethod = superMethods[0]; boolean isAbstract = method.hasModifierProperty(PsiModifier.ABSTRACT); boolean isSuperAbstract = superMethod.hasModifierProperty(PsiModifier.ABSTRACT); @NonNls final String key; if (isSuperAbstract && !isAbstract) { key = "method.implements.in"; } else { key = "method.overrides.in"; } builder.append(sep); sep = "<br>"; composeText(superMethods, DaemonBundle.message(key), builder); } if (count == 0) return null; builder.append("</html></body>"); return builder.toString(); } }, new LineMarkerNavigator() { public void browse(MouseEvent e, PsiElement element) { PsiElement parent = element.getParent(); if (!(parent instanceof GrField)) return; final GrField field = (GrField)parent; final List<GrAccessorMethod> accessors = GroovyPropertyUtils.getFieldAccessors(field); final ArrayList<PsiMethod> superMethods = new ArrayList<PsiMethod>(); for (GrAccessorMethod method : accessors) { Collections.addAll(superMethods, method.findSuperMethods(false)); } if (superMethods.size() == 0) return; final PsiMethod[] supers = ContainerUtil.toArray(superMethods, new PsiMethod[superMethods.size()]); boolean showMethodNames = !PsiUtil.allMethodsHaveSameSignature(supers); PsiElementListNavigator.openTargets(e, supers, DaemonBundle.message("navigation.title.super.method", field.getName()), new MethodCellRenderer(showMethodNames)); } }); private static final MarkerType OVERRIDEN_PROPERTY_TYPE = new MarkerType(new Function<PsiElement, String>() { @Nullable @Override public String fun(PsiElement element) { PsiElement parent = element.getParent(); if (!(parent instanceof GrField)) return null; final List<GrAccessorMethod> accessors = GroovyPropertyUtils.getFieldAccessors((GrField)parent); PsiElementProcessor.CollectElementsWithLimit<PsiMethod> processor = new 
PsiElementProcessor.CollectElementsWithLimit<PsiMethod>(5); for (GrAccessorMethod method : accessors) { OverridingMethodsSearch.search(method, method.getUseScope(), true).forEach(new PsiElementProcessorAdapter<PsiMethod>(processor)); } if (processor.isOverflow()) { return DaemonBundle.message("method.is.overridden.too.many"); } PsiMethod[] overridings = processor.toArray(new PsiMethod[processor.getCollection().size()]); if (overridings.length == 0) return null; Comparator<PsiMethod> comparator = new MethodCellRenderer(false).getComparator(); Arrays.sort(overridings, comparator); String start = DaemonBundle.message("method.is.overriden.header"); @NonNls String pattern = "&nbsp;&nbsp;&nbsp;&nbsp;{1}"; return GutterIconTooltipHelper.composeText(overridings, start, pattern); } }, new LineMarkerNavigator() { @Override public void browse(MouseEvent e, PsiElement element) { PsiElement parent = element.getParent(); if (!(parent instanceof GrField)) return; if (DumbService.isDumb(element.getProject())) { DumbService.getInstance(element.getProject()).showDumbModeNotification("Navigation to overriding classes is not possible during index update"); return; } final GrField field = (GrField)parent; final CommonProcessors.CollectProcessor<PsiMethod> collectProcessor = new CommonProcessors.CollectProcessor<PsiMethod>(new THashSet<PsiMethod>()); if (!ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() { public void run() { for (GrAccessorMethod method : GroovyPropertyUtils.getFieldAccessors(field)) { OverridingMethodsSearch.search(method, method.getUseScope(), true).forEach(collectProcessor); } } }, "Searching for overriding methods", true, field.getProject(), (JComponent)e.getComponent())) { return; } PsiMethod[] overridings = collectProcessor.toArray(PsiMethod.EMPTY_ARRAY); if (overridings.length == 0) return; String title = DaemonBundle.message("navigation.title.overrider.method", field.getName(), overridings.length); boolean showMethodNames = 
!PsiUtil.allMethodsHaveSameSignature(overridings); MethodCellRenderer renderer = new MethodCellRenderer(showMethodNames); Arrays.sort(overridings, renderer.getComparator()); PsiElementListNavigator.openTargets(e, overridings, title, renderer); } } ); private static StringBuilder composeText(@NotNull PsiElement[] elements, final String pattern, StringBuilder result) { Set<String> names = new LinkedHashSet<String>(); for (PsiElement element : elements) { String methodName = ((PsiMethod)element).getName(); PsiClass aClass = ((PsiMethod)element).getContainingClass(); String className = aClass == null ? "" : ClassPresentationUtil.getNameForClass(aClass, true); names.add(MessageFormat.format(pattern, methodName, className)); } @NonNls String sep = ""; for (String name : names) { result.append(sep); sep = "<br>"; result.append(name); } return result; } public GroovyLineMarkerProvider(DaemonCodeAnalyzerSettings daemonSettings, EditorColorsManager colorsManager) { super(daemonSettings, colorsManager); } @Override public LineMarkerInfo getLineMarkerInfo(final PsiElement element) { final PsiElement parent = element.getParent(); if (parent instanceof PsiNameIdentifierOwner) { if (parent instanceof GrField) { for (GrAccessorMethod method : GroovyPropertyUtils.getFieldAccessors((GrField)parent)) { MethodSignatureBackedByPsiMethod superSignature = null; try { superSignature = SuperMethodsSearch.search(method, null, true, false).findFirst(); } catch (IndexNotReadyException e) { //some searchers (EJB) require indices. What shall we do? } if (superSignature != null) { boolean overrides = method.hasModifierProperty(PsiModifier.ABSTRACT) == superSignature.getMethod().hasModifierProperty(PsiModifier.ABSTRACT); final Icon icon = overrides ? 
OVERRIDING_METHOD_ICON : IMPLEMENTING_METHOD_ICON; final MarkerType type = OVERRIDING_PROPERTY_TYPE; return new LineMarkerInfo<PsiElement>(element, element.getTextRange(), icon, Pass.UPDATE_ALL, type.getTooltip(), type.getNavigationHandler(), GutterIconRenderer.Alignment.LEFT); } } } final ASTNode node = element.getNode(); if (node != null && TokenSets.PROPERTY_NAMES.contains(node.getElementType())) { return super.getLineMarkerInfo(((PsiNameIdentifierOwner)parent).getNameIdentifier()); } } //need to draw method separator above docComment if (myDaemonSettings.SHOW_METHOD_SEPARATORS && element.getFirstChild() == null) { PsiElement element1 = element; boolean isMember = false; while (element1 != null && !(element1 instanceof PsiFile) && element1.getPrevSibling() == null) { element1 = element1.getParent(); if (element1 instanceof PsiMember || element1 instanceof GrVariableDeclarationBase) { isMember = true; break; } } if (isMember && !(element1 instanceof PsiAnonymousClass || element1.getParent() instanceof PsiAnonymousClass)) { boolean drawSeparator = false; int category = getGroovyCategory(element1); for (PsiElement child = element1.getPrevSibling(); child != null; child = child.getPrevSibling()) { int category1 = getGroovyCategory(child); if (category1 == 0) continue; drawSeparator = category != 1 || category1 != 1; break; } if (drawSeparator) { GrDocComment comment = null; if (element1 instanceof GrDocCommentOwner) { comment = ((GrDocCommentOwner)element1).getDocComment(); } LineMarkerInfo info = new LineMarkerInfo<PsiElement>(element, comment != null ? 
comment.getTextRange() : element.getTextRange(), null, Pass.UPDATE_ALL, FunctionUtil.<Object, String>nullConstant(), null, GutterIconRenderer.Alignment.RIGHT); EditorColorsScheme scheme = myColorsManager.getGlobalScheme(); info.separatorColor = scheme.getColor(CodeInsightColors.METHOD_SEPARATORS_COLOR); info.separatorPlacement = SeparatorPlacement.TOP; return info; } } } return super.getLineMarkerInfo(element); } private static int getGroovyCategory(PsiElement element) { if (element instanceof GrVariableDeclarationBase) { GrVariable[] variables = ((GrVariableDeclarationBase)element).getVariables(); if (variables.length == 1 && variables[0] instanceof GrField && variables[0].getInitializerGroovy() instanceof GrClosableBlock) { return 2; } } return JavaLineMarkerProvider.getCategory(element); } @Override public void collectSlowLineMarkers(final List<PsiElement> elements, final Collection<LineMarkerInfo> result) { List<GrField> fields = new ArrayList<GrField>(); for (PsiElement element : elements) { if (!(element instanceof GrField)) continue; fields.add((GrField)element); } collectOverridingMethods(fields, result); super.collectSlowLineMarkers(elements, result); } private static void collectOverridingMethods(final List<GrField> fields, Collection<LineMarkerInfo> result) { final Set<GrField> overridden = new HashSet<GrField>(); final HashSet<GrAccessorMethod> accessors = new HashSet<GrAccessorMethod>(); Set<PsiClass> classes = new THashSet<PsiClass>(); for (GrField field : fields) { ProgressManager.checkCanceled(); final PsiClass parentClass = field.getContainingClass(); if (!"java.lang.Object".equals(parentClass.getQualifiedName())) { classes.add(parentClass); } accessors.addAll(GroovyPropertyUtils.getFieldAccessors(field)); } for (final PsiClass aClass : classes) { AllOverridingMethodsSearch.search(aClass).forEach(new Processor<Pair<PsiMethod, PsiMethod>>() { public boolean process(final Pair<PsiMethod, PsiMethod> pair) { ProgressManager.checkCanceled(); final 
PsiMethod superMethod = pair.getFirst(); if (isCorrectTarget(superMethod) && isCorrectTarget(pair.getSecond())) { if (accessors.remove(superMethod)) { LOG.assertTrue(superMethod instanceof GrAccessorMethod); overridden.add(((GrAccessorMethod)superMethod).getProperty()); } } return !fields.isEmpty(); } }); } for (GrField field : overridden) { final Icon icon = OVERRIDEN_METHOD_MARKER_RENDERER; PsiElement range; range = field.getNameIdentifierGroovy(); final MarkerType type = OVERRIDEN_PROPERTY_TYPE; LineMarkerInfo info = new LineMarkerInfo<PsiElement>(range, range.getTextRange(), icon, Pass.UPDATE_OVERRIDEN_MARKERS, type.getTooltip(), type.getNavigationHandler(), GutterIconRenderer.Alignment.RIGHT); result.add(info); } } private static boolean isCorrectTarget(PsiMethod superMethod) { final PsiElement navigationElement = superMethod.getNavigationElement(); return superMethod.isPhysical() || navigationElement.isPhysical() && !(navigationElement instanceof PsiClass); } }
silently catch INRE in groovy slow line markers pass
plugins/groovy/src/org/jetbrains/plugins/groovy/codeInsight/GroovyLineMarkerProvider.java
silently catch INRE in groovy slow line markers pass
<ide><path>lugins/groovy/src/org/jetbrains/plugins/groovy/codeInsight/GroovyLineMarkerProvider.java <ide> } <ide> <ide> for (final PsiClass aClass : classes) { <del> AllOverridingMethodsSearch.search(aClass).forEach(new Processor<Pair<PsiMethod, PsiMethod>>() { <del> public boolean process(final Pair<PsiMethod, PsiMethod> pair) { <del> ProgressManager.checkCanceled(); <del> <del> final PsiMethod superMethod = pair.getFirst(); <del> if (isCorrectTarget(superMethod) && isCorrectTarget(pair.getSecond())) { <del> if (accessors.remove(superMethod)) { <del> LOG.assertTrue(superMethod instanceof GrAccessorMethod); <del> overridden.add(((GrAccessorMethod)superMethod).getProperty()); <add> try { <add> AllOverridingMethodsSearch.search(aClass).forEach(new Processor<Pair<PsiMethod, PsiMethod>>() { <add> public boolean process(final Pair<PsiMethod, PsiMethod> pair) { <add> ProgressManager.checkCanceled(); <add> <add> final PsiMethod superMethod = pair.getFirst(); <add> if (isCorrectTarget(superMethod) && isCorrectTarget(pair.getSecond())) { <add> if (accessors.remove(superMethod)) { <add> LOG.assertTrue(superMethod instanceof GrAccessorMethod); <add> overridden.add(((GrAccessorMethod)superMethod).getProperty()); <add> } <ide> } <del> } <del> return !fields.isEmpty(); <del> } <del> }); <add> return !fields.isEmpty(); <add> } <add> }); <add> } <add> catch (IndexNotReadyException ignored) { <add> } <ide> } <ide> <ide> for (GrField field : overridden) {
Java
apache-2.0
b991115e4bb5bbdc11c79e8c792459e1c0abb3a1
0
saki4510t/libcommon,saki4510t/libcommon
package com.serenegiant.glpipeline; /* * libcommon * utility/helper classes for myself * * Copyright (c) 2014-2022 saki [email protected] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import android.opengl.GLES20; import android.util.Log; import com.serenegiant.glutils.EffectDrawer2D; import com.serenegiant.glutils.GLDrawer2D; import com.serenegiant.glutils.GLManager; import com.serenegiant.glutils.GLSurface; import com.serenegiant.glutils.GLUtils; import com.serenegiant.glutils.RendererTarget; import com.serenegiant.math.Fraction; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.Size; import androidx.annotation.WorkerThread; import static com.serenegiant.glutils.GLEffect.EFFECT_NON; /** * OpenGL|ESのシェーダーを使って映像効果付与をするGLPipeline実装 * 描画先のsurfaceにnullを指定すると映像効果を付与したテクスチャを次のGLPipelineへ送る */ public class EffectPipeline extends ProxyPipeline implements GLSurfacePipeline { private static final boolean DEBUG = false; // set false on production private static final String TAG = EffectPipeline.class.getSimpleName(); /** * パイプラインチェーンに含まれるEffectPipelineを取得する * 複数存在する場合は最初に見つかったものを返す * @param pipeline * @return * @deprecated GLPipeline#find(pipeline, clazz)を使う */ @Deprecated @Nullable public static EffectPipeline find(@NonNull final GLPipeline pipeline) { // パイプラインチェーンの先頭を取得 GLPipeline p = GLPipeline.findFirst(pipeline); // EffectPipelineが見つかるまで順番にたどる while (p != null) { if (p instanceof EffectPipeline) { return (EffectPipeline)p; } p = 
p.getPipeline(); } return null; } //-------------------------------------------------------------------------------- @NonNull private final Object mSync = new Object(); @NonNull private final GLManager mManager; @Nullable private EffectDrawer2D mDrawer; private int mEffect = EFFECT_NON; @Nullable private RendererTarget mRendererTarget; /** * 映像効果付与してそのまま次のGLPipelineへ送るかSurfaceへ描画するか * setSurfaceで有効な描画先Surfaceをセットしていればfalse、セットしていなければtrue */ private volatile boolean mEffectOnly; /** * 映像効果付与してそのまま次のGLPipelineへ送る場合のワーク用GLSurface */ @Nullable private GLSurface work; /** * コンストラクタ * @param manager * @param manager * @throws IllegalStateException * @throws IllegalArgumentException */ public EffectPipeline(@NonNull final GLManager manager) throws IllegalStateException, IllegalArgumentException { this(manager, null, null); } /** * コンストラクタ * @param manager * 対応していないSurface形式の場合はIllegalArgumentExceptionを投げる * @param surface nullまたはSurface/SurfaceHolder/SurfaceTexture/SurfaceView * @param maxFps 最大フレームレート, nullまたはFraction#ZEROなら制限なし * @throws IllegalStateException * @throws IllegalArgumentException */ public EffectPipeline( @NonNull final GLManager manager, @Nullable final Object surface, @Nullable final Fraction maxFps) throws IllegalStateException, IllegalArgumentException { super(); if (DEBUG) Log.v(TAG, "コンストラクタ:"); if ((surface != null) && !GLUtils.isSupportedSurface(surface)) { throw new IllegalArgumentException("Unsupported surface type!," + surface); } mManager = manager; manager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { createTargetOnGL(surface, maxFps); } }); } @Override protected void internalRelease() { if (DEBUG) Log.v(TAG, "internalRelease:"); if (isValid()) { releaseTarget(); } super.internalRelease(); } /** * ISurfacePipelineの実装 * 描画先のSurfaceを差し替え, 最大フレームレートの制限をしない * 対応していないSurface形式の場合はIllegalArgumentExceptionを投げる * @param surface nullまたはSurface/SurfaceHolder/SurfaceTexture/SurfaceView * @throws IllegalStateException * 
@throws IllegalArgumentException */ @Override public void setSurface(@Nullable final Object surface) throws IllegalStateException, IllegalArgumentException { setSurface(surface, null); } /** * ISurfacePipelineの実装 * 描画先のSurfaceを差し替え * 対応していないSurface形式の場合はIllegalArgumentExceptionを投げる * @param surface nullまたはSurface/SurfaceHolder/SurfaceTexture/SurfaceView * @param maxFps 最大フレームレート, nullまたはFraction#ZEROなら制限なし * @throws IllegalStateException * @throws IllegalArgumentException */ @Override public void setSurface( @Nullable final Object surface, @Nullable final Fraction maxFps) throws IllegalStateException, IllegalArgumentException { if (DEBUG) Log.v(TAG, "setSurface:" + surface); if (!isValid()) { throw new IllegalStateException("already released?"); } if ((surface != null) && !GLUtils.isSupportedSurface(surface)) { throw new IllegalArgumentException("Unsupported surface type!," + surface); } mManager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { createTargetOnGL(surface, maxFps); } }); } /** * 描画先のSurfaceをセットしているかどうか * #isEffectOnlyの符号反転したのものと実質的には同じ * @return */ @Override public boolean hasSurface() { synchronized (mSync) { return mRendererTarget != null; } } /** * セットされているSurface識別用のidを取得 * @return Surfaceがセットされていればそのid(#hashCode)、セットされていなければ0を返す */ @Override public int getId() { synchronized (mSync) { return mRendererTarget != null ? 
mRendererTarget.getId() : 0; } } @Override public boolean isValid() { return super.isValid() && mManager.isValid(); } /** * 映像効果付与をSurfaceへせずに次のGLPipelineへ送るだけかどうか * コンストラクタまたはsetSurfaceで描画先のsurfaceにnullを指定するとtrue * @return */ public boolean isEffectOnly() { return mEffectOnly; } private int cnt; @WorkerThread @Override public void onFrameAvailable( final boolean isOES, final int texId, @NonNull @Size(min=16) final float[] texMatrix) { if (isValid()) { @NonNull final EffectDrawer2D drawer; @Nullable final RendererTarget target; if ((mDrawer == null) || (isOES != mDrawer.isOES())) { // 初回またはGLPipelineを繋ぎ変えたあとにテクスチャが変わるかもしれない if (mDrawer != null) { mDrawer.release(); } if (DEBUG) Log.v(TAG, "onFrameAvailable:create GLDrawer2D"); mDrawer = new EffectDrawer2D(mManager.isGLES3(), isOES, mEffectListener); mDrawer.setEffect(mEffect); } drawer = mDrawer; synchronized (mSync) { target = mRendererTarget; } if ((target != null) && target.canDraw()) { target.draw(drawer.getDrawer(), GLES20.GL_TEXTURE0, texId, texMatrix); } if (mEffectOnly && (work != null)) { if (DEBUG && (++cnt % 100) == 0) { Log.v(TAG, "onFrameAvailable:effectOnly," + cnt); } // 映像効果付与したテクスチャを次へ渡す super.onFrameAvailable(work.isOES(), work.getTexId(), work.getTexMatrix()); } else { if (DEBUG && (++cnt % 100) == 0) { Log.v(TAG, "onFrameAvailable:" + cnt); } // こっちはオリジナルのテクスチャを渡す super.onFrameAvailable(isOES, texId, texMatrix); } } } @Override public void refresh() { super.refresh(); // XXX #removeでパイプラインチェーンのどれかを削除するとなぜか映像が表示されなくなってしまうことへのワークアラウンド // XXX パイプライン中のどれかでシェーダーを再生成すると表示されるようになる if (isValid()) { mManager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { if (DEBUG) Log.v(TAG, "refresh#run:release drawer"); EffectDrawer2D drawer = mDrawer; mDrawer = null; if (drawer != null) { synchronized (mSync) { mEffect = drawer.getCurrentEffect(); } drawer.release(); } } }); } } //-------------------------------------------------------------------------------- /** * 映像効果をリセット * @throws 
IllegalStateException */ public void resetEffect() throws IllegalStateException { if (isValid()) { mManager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { if (mDrawer != null) { mDrawer.resetEffect(); synchronized (mSync) { mEffect = mDrawer.getCurrentEffect(); } } } }); } else { throw new IllegalStateException("already released!"); } } /** * 映像効果をセット * @param effect * @throws IllegalStateException */ public void setEffect(final int effect) throws IllegalStateException { if (DEBUG) Log.v(TAG, "setEffect:" + effect); if (isValid()) { mManager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { if (DEBUG) Log.v(TAG, "setEffect#run:" + effect); if (mDrawer != null) { mDrawer.setEffect(effect); synchronized (mSync) { mEffect = mDrawer.getCurrentEffect(); } } } }); } else { throw new IllegalStateException("already released!"); } } public int getCurrentEffect() { if (DEBUG) Log.v(TAG, "getCurrentEffect:" + mDrawer.getCurrentEffect()); synchronized (mSync) { return mEffect; } } //-------------------------------------------------------------------------------- final EffectDrawer2D.EffectListener mEffectListener = new EffectDrawer2D.EffectListener() { @WorkerThread @Override public boolean onChangeEffect(final int effect, @NonNull final GLDrawer2D drawer) { return EffectPipeline.this.onChangeEffect(effect, drawer); } }; /** * 描画先のSurfaceを生成 * @param surface * @param maxFps */ @WorkerThread private void createTargetOnGL(@Nullable final Object surface, @Nullable final Fraction maxFps) { if (DEBUG) Log.v(TAG, "createTarget:" + surface); synchronized (mSync) { if ((mRendererTarget == null) || (mRendererTarget.getSurface() != surface)) { if (mRendererTarget != null) { mRendererTarget.release(); mRendererTarget = null; } if (work != null) { work.release(); work = null; } if (GLUtils.isSupportedSurface(surface)) { mRendererTarget = RendererTarget.newInstance( mManager.getEgl(), surface, maxFps != null ? 
maxFps.asFloat() : 0); mEffectOnly = false; } else { if (DEBUG) Log.v(TAG, "createTarget:create GLSurface as work texture"); work = GLSurface.newInstance(mManager.isGLES3(), getWidth(), getHeight()); mRendererTarget = RendererTarget.newInstance( mManager.getEgl(), work, maxFps != null ? maxFps.asFloat() : 0); mEffectOnly = true; } } } } private void releaseTarget() { final EffectDrawer2D drawer = mDrawer; final RendererTarget target; final GLSurface w; mDrawer = null; synchronized (mSync) { target = mRendererTarget; mRendererTarget = null; w = work; work = null; } if ((drawer != null) || (target != null)) { if (DEBUG) Log.v(TAG, "releaseTarget:"); if (mManager.isValid()) { try { mManager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { if (drawer != null) { if (DEBUG) Log.v(TAG, "releaseTarget:release drawer"); drawer.release(); } if (target != null) { if (DEBUG) Log.v(TAG, "releaseTarget:release target"); target.release(); } if (w != null) { w.release(); } } }); } catch (final Exception e) { if (DEBUG) Log.w(TAG, e); } } else if (DEBUG) { Log.w(TAG, "releaseTarget:unexpectedly GLManager is already released!"); } } } @WorkerThread protected boolean onChangeEffect(final int effect, @NonNull final GLDrawer2D drawer) { return false; } }
common/src/main/java/com/serenegiant/glpipeline/EffectPipeline.java
package com.serenegiant.glpipeline; /* * libcommon * utility/helper classes for myself * * Copyright (c) 2014-2022 saki [email protected] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import android.opengl.GLES20; import android.util.Log; import com.serenegiant.glutils.EffectDrawer2D; import com.serenegiant.glutils.GLDrawer2D; import com.serenegiant.glutils.GLManager; import com.serenegiant.glutils.GLSurface; import com.serenegiant.glutils.GLUtils; import com.serenegiant.glutils.RendererTarget; import com.serenegiant.math.Fraction; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.Size; import androidx.annotation.WorkerThread; import static com.serenegiant.glutils.GLEffect.EFFECT_NON; /** * OpenGL|ESのシェーダーを使って映像効果付与をするGLPipeline実装 * 描画先のsurfaceにnullを指定すると映像効果を付与したテクスチャを次のGLPipelineへ送る */ public class EffectPipeline extends ProxyPipeline implements GLSurfacePipeline { private static final boolean DEBUG = false; // set false on production private static final String TAG = EffectPipeline.class.getSimpleName(); /** * パイプラインチェーンに含まれるEffectPipelineを取得する * 複数存在する場合は最初に見つかったものを返す * @param pipeline * @return * @deprecated GLPipeline#find(pipeline, clazz)を使う */ @Deprecated @Nullable public static EffectPipeline find(@NonNull final GLPipeline pipeline) { // パイプラインチェーンの先頭を取得 GLPipeline p = GLPipeline.findFirst(pipeline); // EffectPipelineが見つかるまで順番にたどる while (p != null) { if (p instanceof EffectPipeline) { return (EffectPipeline)p; } p = 
p.getPipeline(); } return null; } //-------------------------------------------------------------------------------- @NonNull private final Object mSync = new Object(); @NonNull private final GLManager mManager; @Nullable private EffectDrawer2D mDrawer; private int mEffect = EFFECT_NON; @Nullable private RendererTarget mRendererTarget; /** * 映像効果付与してそのまま次のGLPipelineへ送るかSurfaceへ描画するか * setSurfaceで有効な描画先Surfaceをセットしていればfalse、セットしていなければtrue */ private volatile boolean mEffectOnly; /** * 映像効果付与してそのまま次のGLPipelineへ送る場合のワーク用GLSurface */ @Nullable private GLSurface work; /** * コンストラクタ * @param manager * @param manager * @throws IllegalStateException * @throws IllegalArgumentException */ public EffectPipeline(@NonNull final GLManager manager) throws IllegalStateException, IllegalArgumentException { this(manager, null, null); } /** * コンストラクタ * @param manager * 対応していないSurface形式の場合はIllegalArgumentExceptionを投げる * @param surface nullまたはSurface/SurfaceHolder/SurfaceTexture/SurfaceView * @param maxFps 最大フレームレート, nullまたはFraction#ZEROなら制限なし * @throws IllegalStateException * @throws IllegalArgumentException */ public EffectPipeline( @NonNull final GLManager manager, @Nullable final Object surface, @Nullable final Fraction maxFps) throws IllegalStateException, IllegalArgumentException { super(); if (DEBUG) Log.v(TAG, "コンストラクタ:"); if ((surface != null) && !GLUtils.isSupportedSurface(surface)) { throw new IllegalArgumentException("Unsupported surface type!," + surface); } mManager = manager; manager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { createTarget(surface, maxFps); } }); } @Override protected void internalRelease() { if (DEBUG) Log.v(TAG, "internalRelease:"); if (isValid()) { releaseTarget(); } super.internalRelease(); } /** * ISurfacePipelineの実装 * 描画先のSurfaceを差し替え, 最大フレームレートの制限をしない * 対応していないSurface形式の場合はIllegalArgumentExceptionを投げる * @param surface nullまたはSurface/SurfaceHolder/SurfaceTexture/SurfaceView * @throws IllegalStateException * @throws 
IllegalArgumentException */ @Override public void setSurface(@Nullable final Object surface) throws IllegalStateException, IllegalArgumentException { setSurface(surface, null); } /** * ISurfacePipelineの実装 * 描画先のSurfaceを差し替え * 対応していないSurface形式の場合はIllegalArgumentExceptionを投げる * @param surface nullまたはSurface/SurfaceHolder/SurfaceTexture/SurfaceView * @param maxFps 最大フレームレート, nullまたはFraction#ZEROなら制限なし * @throws IllegalStateException * @throws IllegalArgumentException */ @Override public void setSurface( @Nullable final Object surface, @Nullable final Fraction maxFps) throws IllegalStateException, IllegalArgumentException { if (DEBUG) Log.v(TAG, "setSurface:" + surface); if (!isValid()) { throw new IllegalStateException("already released?"); } if ((surface != null) && !GLUtils.isSupportedSurface(surface)) { throw new IllegalArgumentException("Unsupported surface type!," + surface); } mManager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { createTarget(surface, maxFps); } }); } /** * 描画先のSurfaceをセットしているかどうか * #isEffectOnlyの符号反転したのものと実質的には同じ * @return */ @Override public boolean hasSurface() { synchronized (mSync) { return mRendererTarget != null; } } /** * セットされているSurface識別用のidを取得 * @return Surfaceがセットされていればそのid(#hashCode)、セットされていなければ0を返す */ @Override public int getId() { synchronized (mSync) { return mRendererTarget != null ? 
mRendererTarget.getId() : 0; } } @Override public boolean isValid() { return super.isValid() && mManager.isValid(); } /** * 映像効果付与をSurfaceへせずに次のGLPipelineへ送るだけかどうか * コンストラクタまたはsetSurfaceで描画先のsurfaceにnullを指定するとtrue * @return */ public boolean isEffectOnly() { return mEffectOnly; } private int cnt; @WorkerThread @Override public void onFrameAvailable( final boolean isOES, final int texId, @NonNull @Size(min=16) final float[] texMatrix) { if (isValid()) { @NonNull final EffectDrawer2D drawer; @Nullable final RendererTarget target; if ((mDrawer == null) || (isOES != mDrawer.isOES())) { // 初回またはGLPipelineを繋ぎ変えたあとにテクスチャが変わるかもしれない if (mDrawer != null) { mDrawer.release(); } if (DEBUG) Log.v(TAG, "onFrameAvailable:create GLDrawer2D"); mDrawer = new EffectDrawer2D(mManager.isGLES3(), isOES, mEffectListener); mDrawer.setEffect(mEffect); } drawer = mDrawer; synchronized (mSync) { target = mRendererTarget; } if ((target != null) && target.canDraw()) { target.draw(drawer.getDrawer(), GLES20.GL_TEXTURE0, texId, texMatrix); } if (mEffectOnly && (work != null)) { if (DEBUG && (++cnt % 100) == 0) { Log.v(TAG, "onFrameAvailable:effectOnly," + cnt); } // 映像効果付与したテクスチャを次へ渡す super.onFrameAvailable(work.isOES(), work.getTexId(), work.getTexMatrix()); } else { if (DEBUG && (++cnt % 100) == 0) { Log.v(TAG, "onFrameAvailable:" + cnt); } // こっちはオリジナルのテクスチャを渡す super.onFrameAvailable(isOES, texId, texMatrix); } } } @Override public void refresh() { super.refresh(); // XXX #removeでパイプラインチェーンのどれかを削除するとなぜか映像が表示されなくなってしまうことへのワークアラウンド // XXX パイプライン中のどれかでシェーダーを再生成すると表示されるようになる if (isValid()) { mManager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { if (DEBUG) Log.v(TAG, "refresh#run:release drawer"); EffectDrawer2D drawer = mDrawer; mDrawer = null; if (drawer != null) { synchronized (mSync) { mEffect = drawer.getCurrentEffect(); } drawer.release(); } } }); } } //-------------------------------------------------------------------------------- /** * 映像効果をリセット * @throws 
IllegalStateException */ public void resetEffect() throws IllegalStateException { if (isValid()) { mManager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { if (mDrawer != null) { mDrawer.resetEffect(); synchronized (mSync) { mEffect = mDrawer.getCurrentEffect(); } } } }); } else { throw new IllegalStateException("already released!"); } } /** * 映像効果をセット * @param effect * @throws IllegalStateException */ public void setEffect(final int effect) throws IllegalStateException { if (DEBUG) Log.v(TAG, "setEffect:" + effect); if (isValid()) { mManager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { if (DEBUG) Log.v(TAG, "setEffect#run:" + effect); if (mDrawer != null) { mDrawer.setEffect(effect); synchronized (mSync) { mEffect = mDrawer.getCurrentEffect(); } } } }); } else { throw new IllegalStateException("already released!"); } } public int getCurrentEffect() { if (DEBUG) Log.v(TAG, "getCurrentEffect:" + mDrawer.getCurrentEffect()); synchronized (mSync) { return mEffect; } } //-------------------------------------------------------------------------------- final EffectDrawer2D.EffectListener mEffectListener = new EffectDrawer2D.EffectListener() { @WorkerThread @Override public boolean onChangeEffect(final int effect, @NonNull final GLDrawer2D drawer) { return EffectPipeline.this.onChangeEffect(effect, drawer); } }; /** * 描画先のSurfaceを生成 * @param surface * @param maxFps */ @WorkerThread private void createTarget(@Nullable final Object surface, @Nullable final Fraction maxFps) { if (DEBUG) Log.v(TAG, "createTarget:" + surface); synchronized (mSync) { if ((mRendererTarget == null) || (mRendererTarget.getSurface() != surface)) { if (mRendererTarget != null) { mRendererTarget.release(); mRendererTarget = null; } if (work != null) { work.release(); work = null; } if (GLUtils.isSupportedSurface(surface)) { mRendererTarget = RendererTarget.newInstance( mManager.getEgl(), surface, maxFps != null ? 
maxFps.asFloat() : 0); mEffectOnly = false; } else { if (DEBUG) Log.v(TAG, "createTarget:create GLSurface as work texture"); work = GLSurface.newInstance(mManager.isGLES3(), getWidth(), getHeight()); mRendererTarget = RendererTarget.newInstance( mManager.getEgl(), work, maxFps != null ? maxFps.asFloat() : 0); mEffectOnly = true; } } } } private void releaseTarget() { final EffectDrawer2D drawer = mDrawer; final RendererTarget target; final GLSurface w; mDrawer = null; synchronized (mSync) { target = mRendererTarget; mRendererTarget = null; w = work; work = null; } if ((drawer != null) || (target != null)) { if (DEBUG) Log.v(TAG, "releaseTarget:"); if (mManager.isValid()) { try { mManager.runOnGLThread(new Runnable() { @WorkerThread @Override public void run() { if (drawer != null) { if (DEBUG) Log.v(TAG, "releaseTarget:release drawer"); drawer.release(); } if (target != null) { if (DEBUG) Log.v(TAG, "releaseTarget:release target"); target.release(); } if (w != null) { w.release(); } } }); } catch (final Exception e) { if (DEBUG) Log.w(TAG, e); } } else if (DEBUG) { Log.w(TAG, "releaseTarget:unexpectedly GLManager is already released!"); } } } @WorkerThread protected boolean onChangeEffect(final int effect, @NonNull final GLDrawer2D drawer) { return false; } }
EffectPipeline#createTargetを他のクラスに合わせてcreateTargetOnGLにリネーム
common/src/main/java/com/serenegiant/glpipeline/EffectPipeline.java
EffectPipeline#createTargetを他のクラスに合わせてcreateTargetOnGLにリネーム
<ide><path>ommon/src/main/java/com/serenegiant/glpipeline/EffectPipeline.java <ide> @WorkerThread <ide> @Override <ide> public void run() { <del> createTarget(surface, maxFps); <add> createTargetOnGL(surface, maxFps); <ide> } <ide> }); <ide> } <ide> @WorkerThread <ide> @Override <ide> public void run() { <del> createTarget(surface, maxFps); <add> createTargetOnGL(surface, maxFps); <ide> } <ide> }); <ide> } <ide> * @param maxFps <ide> */ <ide> @WorkerThread <del> private void createTarget(@Nullable final Object surface, @Nullable final Fraction maxFps) { <add> private void createTargetOnGL(@Nullable final Object surface, @Nullable final Fraction maxFps) { <ide> if (DEBUG) Log.v(TAG, "createTarget:" + surface); <ide> synchronized (mSync) { <ide> if ((mRendererTarget == null) || (mRendererTarget.getSurface() != surface)) {
Java
apache-2.0
error: pathspec 'src/main/java/com/sensei/search/nodes/SenseiSysBroker.java' did not match any file(s) known to git
587a879dac104c7140f9c19d2256553b60de3670
1
senseidb/sensei,javasoze/sensei,javasoze/sensei,DataDog/sensei,senseidb/sensei,javasoze/sensei,DataDog/sensei,senseidb/sensei,javasoze/sensei,senseidb/sensei,DataDog/sensei,DataDog/sensei
package com.sensei.search.nodes; import it.unimi.dsi.fastutil.ints.IntOpenHashSet; import it.unimi.dsi.fastutil.ints.IntSet; import java.util.Comparator; import java.util.List; import java.util.Set; import org.apache.log4j.Logger; import com.linkedin.norbert.NorbertException; import com.linkedin.norbert.javacompat.cluster.ClusterClient; import com.linkedin.norbert.javacompat.cluster.Node; import com.linkedin.norbert.javacompat.network.PartitionedNetworkClient; import com.sensei.search.cluster.routing.SenseiLoadBalancerFactory; import com.sensei.search.req.SenseiRequest; import com.sensei.search.req.SenseiSystemInfo; import com.sensei.search.req.protobuf.SenseiSysRequestBPO; import com.sensei.search.req.protobuf.SenseiSysRequestBPOConverter; import com.sensei.search.req.protobuf.SenseiSysResultBPO; public class SenseiSysBroker extends AbstractConsistentHashBroker<SenseiRequest, SenseiSystemInfo, SenseiSysRequestBPO.SysRequest, SenseiSysResultBPO.SysResult> { private final static Logger logger = Logger.getLogger(SenseiSysBroker.class); private final static long TIMEOUT_MILLIS = 8000L; private long _timeoutMillis = TIMEOUT_MILLIS; private final Comparator<String> _versionComparator; public SenseiSysBroker(PartitionedNetworkClient<Integer> networkClient, ClusterClient clusterClient, SenseiLoadBalancerFactory loadBalancerFactory, Comparator<String> versionComparator) throws NorbertException { super(networkClient, clusterClient, SenseiSysRequestBPO.SysRequest.getDefaultInstance(), SenseiSysResultBPO.SysResult.getDefaultInstance(),loadBalancerFactory); _versionComparator = versionComparator; logger.info("created broker instance " + networkClient + " " + clusterClient + " " + loadBalancerFactory); } @Override public SenseiSystemInfo mergeResults(SenseiRequest request, List<SenseiSystemInfo> resultList) { SenseiSystemInfo result = new SenseiSystemInfo(); if (resultList == null) return result; for (SenseiSystemInfo res : resultList) { 
result.setNumDocs(result.getNumDocs()+res.getNumDocs()); if (result.getLastModified() < res.getLastModified()) result.setLastModified(res.getLastModified()); if (result.getVersion() == null || _versionComparator.compare(result.getVersion(), res.getVersion()) < 0) result.setVersion(res.getVersion()); if (res.getFacetInfos() != null) result.setFacetInfos(res.getFacetInfos()); if (res.getClusterInfo() != null) { if (result.getClusterInfo() != null) result.getClusterInfo().putAll(res.getClusterInfo()); else result.setClusterInfo(res.getClusterInfo()); } } return result; } @Override public String getRouteParam(SenseiRequest req) { return req.getRouteParam(); } @Override public SenseiSystemInfo getEmptyResultInstance() { return new SenseiSystemInfo(); } @Override public SenseiSystemInfo messageToResult(SenseiSysResultBPO.SysResult message) { return SenseiSysRequestBPOConverter.convert(message); } @Override public SenseiSysRequestBPO.SysRequest requestToMessage(SenseiRequest request) { return SenseiSysRequestBPOConverter.convert(request); } @Override public void setTimeoutMillis(long timeoutMillis){ _timeoutMillis = timeoutMillis; } @Override public long getTimeoutMillis(){ return _timeoutMillis; } private IntSet getPartitions(Set<Node> nodes) { IntSet partitionSet = new IntOpenHashSet(); for (Node n : nodes) { partitionSet.addAll(n.getPartitionIds()); } return partitionSet; } public void handleClusterConnected(Set<Node> nodes) { _loadBalancer = _loadBalancerFactory.newLoadBalancer(nodes); _partitions = getPartitions(nodes); logger.info("handleClusterConnected(): Received the list of nodes from norbert " + nodes.toString()); logger.info("handleClusterConnected(): Received the list of partitions from router " + _partitions.toString()); } public void handleClusterDisconnected() { logger.info("handleClusterDisconnected() called"); _partitions = new IntOpenHashSet(); } public void handleClusterNodesChanged(Set<Node> nodes) { _loadBalancer = 
_loadBalancerFactory.newLoadBalancer(nodes); _partitions = getPartitions(nodes); logger.info("handleClusterNodesChanged(): Received the list of nodes from norbert " + nodes.toString()); logger.info("handleClusterNodesChanged(): Received the list of partitions from router " + _partitions.toString()); } @Override public void handleClusterShutdown() { logger.info("handleClusterShutdown() called"); } }
src/main/java/com/sensei/search/nodes/SenseiSysBroker.java
Upload missing files.
src/main/java/com/sensei/search/nodes/SenseiSysBroker.java
Upload missing files.
<ide><path>rc/main/java/com/sensei/search/nodes/SenseiSysBroker.java <add>package com.sensei.search.nodes; <add> <add>import it.unimi.dsi.fastutil.ints.IntOpenHashSet; <add>import it.unimi.dsi.fastutil.ints.IntSet; <add> <add>import java.util.Comparator; <add>import java.util.List; <add>import java.util.Set; <add> <add>import org.apache.log4j.Logger; <add> <add>import com.linkedin.norbert.NorbertException; <add>import com.linkedin.norbert.javacompat.cluster.ClusterClient; <add>import com.linkedin.norbert.javacompat.cluster.Node; <add>import com.linkedin.norbert.javacompat.network.PartitionedNetworkClient; <add>import com.sensei.search.cluster.routing.SenseiLoadBalancerFactory; <add>import com.sensei.search.req.SenseiRequest; <add>import com.sensei.search.req.SenseiSystemInfo; <add>import com.sensei.search.req.protobuf.SenseiSysRequestBPO; <add>import com.sensei.search.req.protobuf.SenseiSysRequestBPOConverter; <add>import com.sensei.search.req.protobuf.SenseiSysResultBPO; <add> <add>public class SenseiSysBroker extends AbstractConsistentHashBroker<SenseiRequest, SenseiSystemInfo, SenseiSysRequestBPO.SysRequest, SenseiSysResultBPO.SysResult> <add>{ <add> private final static Logger logger = Logger.getLogger(SenseiSysBroker.class); <add> private final static long TIMEOUT_MILLIS = 8000L; <add> <add> private long _timeoutMillis = TIMEOUT_MILLIS; <add> private final Comparator<String> _versionComparator; <add> <add> public SenseiSysBroker(PartitionedNetworkClient<Integer> networkClient, ClusterClient clusterClient, <add> SenseiLoadBalancerFactory loadBalancerFactory, Comparator<String> versionComparator) throws NorbertException <add> { <add> super(networkClient, clusterClient, SenseiSysRequestBPO.SysRequest.getDefaultInstance(), SenseiSysResultBPO.SysResult.getDefaultInstance(),loadBalancerFactory); <add> _versionComparator = versionComparator; <add> logger.info("created broker instance " + networkClient + " " + clusterClient + " " + loadBalancerFactory); <add> } <add> 
<add> @Override <add> public SenseiSystemInfo mergeResults(SenseiRequest request, List<SenseiSystemInfo> resultList) <add> { <add> SenseiSystemInfo result = new SenseiSystemInfo(); <add> if (resultList == null) <add> return result; <add> <add> for (SenseiSystemInfo res : resultList) <add> { <add> result.setNumDocs(result.getNumDocs()+res.getNumDocs()); <add> if (result.getLastModified() < res.getLastModified()) <add> result.setLastModified(res.getLastModified()); <add> if (result.getVersion() == null || _versionComparator.compare(result.getVersion(), res.getVersion()) < 0) <add> result.setVersion(res.getVersion()); <add> if (res.getFacetInfos() != null) <add> result.setFacetInfos(res.getFacetInfos()); <add> if (res.getClusterInfo() != null) { <add> if (result.getClusterInfo() != null) <add> result.getClusterInfo().putAll(res.getClusterInfo()); <add> else <add> result.setClusterInfo(res.getClusterInfo()); <add> } <add> } <add> <add> return result; <add> } <add> <add> @Override <add> public String getRouteParam(SenseiRequest req) <add> { <add> return req.getRouteParam(); <add> } <add> <add> @Override <add> public SenseiSystemInfo getEmptyResultInstance() <add> { <add> return new SenseiSystemInfo(); <add> } <add> <add> @Override <add> public SenseiSystemInfo messageToResult(SenseiSysResultBPO.SysResult message) <add> { <add> return SenseiSysRequestBPOConverter.convert(message); <add> } <add> <add> @Override <add> public SenseiSysRequestBPO.SysRequest requestToMessage(SenseiRequest request) <add> { <add> return SenseiSysRequestBPOConverter.convert(request); <add> } <add> <add> @Override <add> public void setTimeoutMillis(long timeoutMillis){ <add> _timeoutMillis = timeoutMillis; <add> } <add> <add> @Override <add> public long getTimeoutMillis(){ <add> return _timeoutMillis; <add> } <add> <add> private IntSet getPartitions(Set<Node> nodes) <add> { <add> IntSet partitionSet = new IntOpenHashSet(); <add> for (Node n : nodes) <add> { <add> 
partitionSet.addAll(n.getPartitionIds()); <add> } <add> return partitionSet; <add> } <add> <add> public void handleClusterConnected(Set<Node> nodes) <add> { <add> _loadBalancer = _loadBalancerFactory.newLoadBalancer(nodes); <add> _partitions = getPartitions(nodes); <add> logger.info("handleClusterConnected(): Received the list of nodes from norbert " + nodes.toString()); <add> logger.info("handleClusterConnected(): Received the list of partitions from router " + _partitions.toString()); <add> } <add> <add> public void handleClusterDisconnected() <add> { <add> logger.info("handleClusterDisconnected() called"); <add> _partitions = new IntOpenHashSet(); <add> } <add> <add> public void handleClusterNodesChanged(Set<Node> nodes) <add> { <add> _loadBalancer = _loadBalancerFactory.newLoadBalancer(nodes); <add> _partitions = getPartitions(nodes); <add> logger.info("handleClusterNodesChanged(): Received the list of nodes from norbert " + nodes.toString()); <add> logger.info("handleClusterNodesChanged(): Received the list of partitions from router " + _partitions.toString()); <add> } <add> <add> @Override <add> public void handleClusterShutdown() <add> { <add> logger.info("handleClusterShutdown() called"); <add> } <add>} <add>
Java
agpl-3.0
b56d9f126f93a43b510fdc57b6a3547fcdccef57
0
Metatavu/kunta-api-server,Metatavu/kunta-api-server,Metatavu/kunta-api-server
package fi.metatavu.kuntaapi.server.integrations.management.tasks; import javax.enterprise.context.ApplicationScoped; import fi.metatavu.kuntaapi.server.id.PageId; import fi.metatavu.kuntaapi.server.tasks.IdTask; import fi.metatavu.kuntaapi.server.tasks.jms.AbstractJmsTaskQueue; @ApplicationScoped public class PageIdTaskQueue extends AbstractJmsTaskQueue<IdTask<PageId>> { public static final String NAME = "management-pages"; public static final String JMS_QUEUE = JMS_QUEUE_PREFIX + NAME; @Override public String getName() { return NAME; } }
src/main/java/fi/metatavu/kuntaapi/server/integrations/management/tasks/PageIdTaskQueue.java
package fi.metatavu.kuntaapi.server.integrations.management.tasks; import javax.enterprise.context.ApplicationScoped; import fi.metatavu.kuntaapi.server.id.PageId; import fi.metatavu.kuntaapi.server.tasks.IdTask; import fi.metatavu.kuntaapi.server.tasks.jms.AbstractJmsTaskQueue; @ApplicationScoped public class PageIdTaskQueue extends AbstractJmsTaskQueue<IdTask<PageId>> { public static final String NAME = "management-pages"; public static final String JMS_QUEUE = JMS_QUEUE_PREFIX + NAME; @Override public String getName() { return NAME; } }
Removed extra space from PageIdTaskQueue
src/main/java/fi/metatavu/kuntaapi/server/integrations/management/tasks/PageIdTaskQueue.java
Removed extra space from PageIdTaskQueue
<ide><path>rc/main/java/fi/metatavu/kuntaapi/server/integrations/management/tasks/PageIdTaskQueue.java <ide> import fi.metatavu.kuntaapi.server.tasks.jms.AbstractJmsTaskQueue; <ide> <ide> @ApplicationScoped <del>public class PageIdTaskQueue extends AbstractJmsTaskQueue<IdTask<PageId>> { <add>public class PageIdTaskQueue extends AbstractJmsTaskQueue<IdTask<PageId>> { <ide> <ide> public static final String NAME = "management-pages"; <ide> public static final String JMS_QUEUE = JMS_QUEUE_PREFIX + NAME;
Java
bsd-2-clause
65279c9d1158fa247e406d6146ef8395d39906c6
0
jenkinsci/p4-plugin,jenkinsci/p4-plugin,jenkinsci/p4-plugin
package org.jenkinsci.plugins.p4.build; import hudson.EnvVars; import hudson.Extension; import hudson.model.Descriptor; import hudson.model.EnvironmentContributor; import hudson.model.Run; import hudson.model.TaskListener; import hudson.scm.SCM; import jenkins.model.Jenkins; import org.apache.commons.lang.StringUtils; import org.jenkinsci.plugins.p4.PerforceScm; import org.jenkinsci.plugins.p4.review.P4Review; import org.jenkinsci.plugins.p4.tagging.TagAction; import java.io.File; import java.io.IOException; import java.util.Map; @Extension() public class P4EnvironmentContributor extends EnvironmentContributor { @Override public void buildEnvironmentFor(Run run, EnvVars env, TaskListener listener) throws IOException, InterruptedException { TagAction tagAction = TagAction.getLastAction(run); buildEnvironment(tagAction, env); File changelogFile = getCurrentChangelogFile(run.getRootDir()); String changelogFilename = changelogFile.getAbsolutePath(); env.put("HUDSON_CHANGELOG_FILE", StringUtils.defaultIfBlank(changelogFilename, "Not-set")); } public static void buildEnvironment(TagAction tagAction, Map<String, String> map) { // parts of Jenkins passes EnvVars as Map<String,String> EnvVars env = new EnvVars(map); buildEnvironment(tagAction, env); map.putAll(env); } private static void buildEnvironment(TagAction tagAction, EnvVars env) { if (tagAction == null) { return; } // Set P4_CHANGELIST value if (tagAction.getRefChanges() != null) { String change = tagAction.getRefChange().toString(); env.put("P4_CHANGELIST", change); } // Set P4_CLIENT workspace value if (tagAction.getClient() != null) { String client = tagAction.getClient(); env.put("P4_CLIENT", client); } // Set P4_PORT connection if (tagAction.getPort() != null) { String port = tagAction.getPort(); env.put("P4_PORT", port); } // Set P4_USER connection if (tagAction.getUser() != null) { String user = tagAction.getUser(); env.put("P4_USER", user); } // Set P4_REVIEW connection if (tagAction.getReview() != null) { 
P4Review review = tagAction.getReview(); env.put("P4_REVIEW", review.getId()); env.put("P4_REVIEW_TYPE", review.getStatus().toString()); } // Set P4_TICKET connection Jenkins j = Jenkins.getInstance(); if (j != null) { @SuppressWarnings("unchecked") Descriptor<SCM> scm = j.getDescriptor(PerforceScm.class); PerforceScm.DescriptorImpl p4scm = (PerforceScm.DescriptorImpl) scm; if (tagAction.getTicket() != null && !p4scm.isHideTicket()) { String ticket = tagAction.getTicket(); env.put("P4_TICKET", ticket); } } } private File getCurrentChangelogFile(File rootDir) { File changelogFile; int i = 0; File next = new File(rootDir, "changelog" + i + ".xml"); do { changelogFile = next; next = new File(rootDir, "changelog" + i + ".xml"); i++; } while (next.exists()); return changelogFile; } }
src/main/java/org/jenkinsci/plugins/p4/build/P4EnvironmentContributor.java
package org.jenkinsci.plugins.p4.build; import hudson.EnvVars; import hudson.Extension; import hudson.model.Descriptor; import hudson.model.EnvironmentContributor; import hudson.model.Run; import hudson.model.TaskListener; import hudson.scm.SCM; import jenkins.model.Jenkins; import org.apache.commons.lang.StringUtils; import org.jenkinsci.plugins.p4.PerforceScm; import org.jenkinsci.plugins.p4.review.P4Review; import org.jenkinsci.plugins.p4.tagging.TagAction; import java.io.File; import java.io.IOException; import java.util.Map; @Extension() public class P4EnvironmentContributor extends EnvironmentContributor { @Override public void buildEnvironmentFor(Run run, EnvVars env, TaskListener listener) throws IOException, InterruptedException { TagAction tagAction = TagAction.getLastAction(run); buildEnvironment(tagAction, env); File changelogFile = getCurrentChangelogFile(run.getRootDir()); String changelogFilename = changelogFile.getAbsolutePath(); env.put("HUDSON_CHANGELOG_FILE", StringUtils.defaultIfBlank(changelogFilename, "Not-set")); } public static void buildEnvironment(TagAction tagAction, Map<String, String> map) { // parts of Jenkins passes EnvVars as Map<String,String> if (map instanceof EnvVars) { EnvVars env = (EnvVars) map; buildEnvironment(tagAction, env); } } public static void buildEnvironment(TagAction tagAction, EnvVars env) { if (tagAction == null) { return; } // Set P4_CHANGELIST value if (tagAction.getRefChanges() != null) { String change = tagAction.getRefChange().toString(); env.put("P4_CHANGELIST", change); } // Set P4_CLIENT workspace value if (tagAction.getClient() != null) { String client = tagAction.getClient(); env.put("P4_CLIENT", client); } // Set P4_PORT connection if (tagAction.getPort() != null) { String port = tagAction.getPort(); env.put("P4_PORT", port); } // Set P4_USER connection if (tagAction.getUser() != null) { String user = tagAction.getUser(); env.put("P4_USER", user); } // Set P4_REVIEW connection if (tagAction.getReview() 
!= null) { P4Review review = tagAction.getReview(); env.put("P4_REVIEW", review.getId()); env.put("P4_REVIEW_TYPE", review.getStatus().toString()); } // Set P4_TICKET connection Jenkins j = Jenkins.getInstance(); if (j != null) { @SuppressWarnings("unchecked") Descriptor<SCM> scm = j.getDescriptor(PerforceScm.class); PerforceScm.DescriptorImpl p4scm = (PerforceScm.DescriptorImpl) scm; if (tagAction.getTicket() != null && !p4scm.isHideTicket()) { String ticket = tagAction.getTicket(); env.put("P4_TICKET", ticket); } } } private File getCurrentChangelogFile(File rootDir) { File changelogFile; int i = 0; File next = new File(rootDir, "changelog" + i + ".xml"); do { changelogFile = next; next = new File(rootDir, "changelog" + i + ".xml"); i++; } while (next.exists()); return changelogFile; } }
Copy environment map to EnvVars. JENKINS-37584
src/main/java/org/jenkinsci/plugins/p4/build/P4EnvironmentContributor.java
Copy environment map to EnvVars.
<ide><path>rc/main/java/org/jenkinsci/plugins/p4/build/P4EnvironmentContributor.java <ide> <ide> public static void buildEnvironment(TagAction tagAction, Map<String, String> map) { <ide> // parts of Jenkins passes EnvVars as Map<String,String> <del> if (map instanceof EnvVars) { <del> EnvVars env = (EnvVars) map; <del> buildEnvironment(tagAction, env); <del> } <add> EnvVars env = new EnvVars(map); <add> buildEnvironment(tagAction, env); <add> map.putAll(env); <ide> } <ide> <del> public static void buildEnvironment(TagAction tagAction, EnvVars env) { <add> private static void buildEnvironment(TagAction tagAction, EnvVars env) { <ide> if (tagAction == null) { <ide> return; <ide> }
Java
mit
081ed6f2be4d51d844ddd5ecec7809cce09e5d61
0
owlsn/patest
import java.util.Arrays; public class advantageCount{ public int[] advantageCount(int[] A, int[] B) { int len = A.length; int[] temp = new int[len]; Arrays.sort(A); Arrays.sort(B); for(int i = 0; i < len; i++){ temp[i] = A[i] - B[i]; } return temp; } }
competition/leetcode/advantageCount.java
import java.util.Arrays; public class advantageCount{ public static void main(String[] args){ System.out.println("test"); } public int[] advantageCount(int[] A, int[] B) { int len = A.length; int[] temp = new int[len]; Arrays.sort(A); Arrays.sort(B); for(int i = 0; i < len; i++){ temp[i] = A[i] - B[i]; } return temp; } }
ambiguousCoordinates
competition/leetcode/advantageCount.java
ambiguousCoordinates
<ide><path>ompetition/leetcode/advantageCount.java <ide> import java.util.Arrays; <ide> <ide> public class advantageCount{ <del> <del> public static void main(String[] args){ <del> System.out.println("test"); <del> } <ide> <ide> public int[] advantageCount(int[] A, int[] B) { <ide> int len = A.length;
JavaScript
bsd-3-clause
f03fecd758a3e86366e204dad97618f3f4b72476
0
firebase/flutterfire,firebase/flutterfire,firebase/flutterfire,firebase/flutterfire,firebase/flutterfire,firebase/flutterfire,firebase/flutterfire,firebase/flutterfire,firebase/flutterfire,firebase/flutterfire
const path = require('path'); module.exports = { title: 'FlutterFire', tagline: 'The official Firebase plugins for Flutter', url: 'https://firebase.flutter.dev', baseUrl: '/', favicon: '/favicon/favicon.ico', organizationName: 'FirebaseExtended', projectName: 'flutterfire', themeConfig: { announcementBar: { id: 'wip', content: 'The FlutterFire documentation hub is currently a work in progress. <a rel="noopener" target="_blank" href="https://github.com/FirebaseExtended/flutterfire/issues/2582"><b>Check out the roadmap to learn more.</b></a>.', backgroundColor: '#13B9FD', textColor: '#fff', }, algolia: { apiKey: '61eba190d4380f3db4e11d21b70e7608', indexName: 'flutterfire', }, prism: { additionalLanguages: [ 'dart', 'bash', 'java', 'kotlin', 'objectivec', 'swift', 'groovy', 'ruby', 'json', 'yaml', ], }, navbar: { title: 'FlutterFire', logo: { alt: 'FlutterFire Logo', src: '/img/flutterfire_300x.png', }, links: [ { to: 'docs/overview', activeBasePath: 'docs', label: 'Docs', position: 'right', }, { href: 'https://twitter.com/flutterfiredev', label: 'Twitter', position: 'right', }, { href: 'https://github.com/firebaseextended/flutterfire', label: 'GitHub', position: 'right', }, ], }, footer: { style: 'dark', links: [ { title: 'Docs', items: [ { label: 'Getting Started', to: '/docs/overview', }, { label: 'Android Installation', to: 'docs/installation/android', }, { label: 'iOS Installation', to: 'docs/installation/ios', }, { label: 'Web Installation', to: 'docs/installation/web', }, ], }, { title: 'Community', items: [ { label: 'Stack Overflow', href: 'https://stackoverflow.com/questions/tagged/flutterfire', }, { label: 'Flutter', href: 'https://flutter.dev/', }, { label: 'pub.dev', href: 'https://pub.dev/', }, ], }, { title: 'Social', items: [ { label: 'GitHub', href: 'https://github.com/FirebaseExtended/flutterfire', }, { label: 'Twitter', href: 'https://twitter.com/flutterfiredev', }, ], }, ], copyright: `<div style="margin-top: 3rem"><small>Except as otherwise noted, 
this work is licensed under a Creative Commons Attribution 4.0 International License, and code samples are licensed under the BSD License.</small></div>`, }, googleAnalytics: { trackingID: 'G-8PJJN5LRR7', anonymizeIP: true, }, }, plugins: [ require.resolve('docusaurus-plugin-sass'), require.resolve('@docusaurus/plugin-ideal-image'), require.resolve('@docusaurus/plugin-google-analytics'), path.resolve(__dirname, './docusaurus-plugins/favicon-tags'), path.resolve(__dirname, './docusaurus-plugins/source-versions'), path.resolve(__dirname, './docusaurus-plugins/source-api-reference'), ], presets: [ [ '@docusaurus/preset-classic', { docs: { path: '../docs', sidebarPath: require.resolve('../docs/sidebars.js'), editUrl: 'https://github.com/FirebaseExtended/flutterfire/edit/master/docs/', }, theme: { customCss: require.resolve('./src/styles.scss'), }, }, ], ], };
website/docusaurus.config.js
const path = require('path'); module.exports = { title: 'FlutterFire', tagline: 'The official Firebase plugins for Flutter', url: 'https://firebase.flutter.dev', baseUrl: '/', favicon: '/favicon/favicon.ico', organizationName: 'FirebaseExtended', projectName: 'flutterfire', themeConfig: { announcementBar: { id: 'wip', content: 'The FlutterFire documentation hub is currently a work in progress. <a rel="noopener" target="_blank" href="https://github.com/FirebaseExtended/flutterfire/issues/2582"><b>Check out the roadmap to learn more.</b></a>.', backgroundColor: '#13B9FD', textColor: '#fff', }, algolia: { apiKey: '61eba190d4380f3db4e11d21b70e7608', indexName: 'flutterfire', }, prism: { additionalLanguages: [ 'dart', 'bash', 'java', 'kotlin', 'objectivec', 'swift', 'groovy', 'ruby', 'json', 'yaml', ], }, navbar: { title: 'FlutterFire', logo: { alt: 'FlutterFire Logo', src: '/img/flutterfire_300x.png', }, links: [ { to: 'docs/overview', activeBasePath: 'docs', label: 'Docs', position: 'right', }, { href: 'https://twitter.com/flutterfiredev', label: 'Twitter', position: 'right', }, { href: 'https://github.com/firebaseextended/flutterfire', label: 'GitHub', position: 'right', }, ], }, footer: { style: 'dark', links: [ { title: 'Docs', items: [ { label: 'Getting Started', to: '/docs/overview', }, { label: 'Android Installation', to: 'docs/installation/android', }, { label: 'iOS Installation', to: 'docs/installation/ios', }, { label: 'Web Installation', to: 'docs/installation/web', }, ], }, { title: 'Community', items: [ { label: 'Stack Overflow', href: 'https://stackoverflow.com/questions/tagged/flutterfire', }, { label: 'Flutter', href: 'https://flutter.dev/', }, { label: 'pub.dev', href: 'https://pub.dev/', }, ], }, { title: 'Social', items: [ { label: 'GitHub', href: 'https://github.com/FirebaseExtended/flutterfire', }, { label: 'Twitter', href: 'https://twitter.com/flutterfiredev', }, ], }, ], copyright: `<div style="margin-top: 3rem"><small>Except as otherwise noted, 
this work is licensed under a Creative Commons Attribution 4.0 International License, and code samples are licensed under the BSD License.</small></div>`, }, }, plugins: [ require.resolve('docusaurus-plugin-sass'), require.resolve('@docusaurus/plugin-ideal-image'), path.resolve(__dirname, './docusaurus-plugins/favicon-tags'), path.resolve(__dirname, './docusaurus-plugins/source-versions'), path.resolve(__dirname, './docusaurus-plugins/source-api-reference'), ], presets: [ [ '@docusaurus/preset-classic', { docs: { path: '../docs', sidebarPath: require.resolve('../docs/sidebars.js'), editUrl: 'https://github.com/FirebaseExtended/flutterfire/edit/master/docs/', }, theme: { customCss: require.resolve('./src/styles.scss'), }, }, ], ], };
Add Google Analytics to Docs (#3322)
website/docusaurus.config.js
Add Google Analytics to Docs (#3322)
<ide><path>ebsite/docusaurus.config.js <ide> ], <ide> copyright: `<div style="margin-top: 3rem"><small>Except as otherwise noted, this work is licensed under a Creative Commons Attribution 4.0 International License, and code samples are licensed under the BSD License.</small></div>`, <ide> }, <add> googleAnalytics: { <add> trackingID: 'G-8PJJN5LRR7', <add> anonymizeIP: true, <add> }, <ide> }, <ide> plugins: [ <ide> require.resolve('docusaurus-plugin-sass'), <ide> require.resolve('@docusaurus/plugin-ideal-image'), <add> require.resolve('@docusaurus/plugin-google-analytics'), <ide> path.resolve(__dirname, './docusaurus-plugins/favicon-tags'), <ide> path.resolve(__dirname, './docusaurus-plugins/source-versions'), <ide> path.resolve(__dirname, './docusaurus-plugins/source-api-reference'),
Java
apache-2.0
9b56ce29e46e1c5798ebef77078b13d13c0e8709
0
astrapi69/jgeohash
package de.alpharogroup.jgeohash.geoip; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import org.apache.commons.io.IOUtils; import org.apache.log4j.Logger; import com.maxmind.geoip.LookupService; import de.alpharogroup.lang.ClassExtensions; /** * The class {@link LookupServiceSingleton} is a singleton class for the {@link LookupService}. */ public final class LookupServiceSingleton { /** The Constant logger. */ private static final Logger LOGGER = Logger.getLogger(LookupServiceSingleton.class.getName()); /** The single instance of the {@link LookupService}. */ private static volatile LookupService instance; /** The constant for the file name prefix. */ private static final String PREFIX = "GeoLiteCity"; /** The Constant for the file name suffix. */ private static final String SUFFIX = ".dat"; /** * Private constructor. */ private LookupServiceSingleton() { } /** * Gets the single instance of the {@link LookupService}. * * @return the single instance of the {@link LookupService}. */ public static LookupService getInstance() { if (instance == null) { synchronized (LookupServiceSingleton.class) { // double check... if (instance == null) { File fileLocation = null; final InputStream is = ClassExtensions.getResourceAsStream(PREFIX + SUFFIX); try { fileLocation = inputStreamToFile(is); instance = new LookupService(fileLocation, LookupService.GEOIP_MEMORY_CACHE); } catch (final IOException e) { LOGGER.error("IOException in the initialization of the LookupService.", e); } } } } return instance; } /** * Creates a temporary file from the given {@link InputStream} object. Note: the created * temporary file from the given {@link InputStream} object will be deleted on finish of the * application. * * @param inputStream * the {@link InputStream} object * @return the temporary file from the given {@link InputStream} object * @throws IOException * Signals that an I/O exception has occurred. 
*/ private static File inputStreamToFile(final InputStream inputStream) throws IOException { final File tempFile = File.createTempFile(PREFIX, SUFFIX); tempFile.deleteOnExit(); final FileOutputStream out = new FileOutputStream(tempFile); IOUtils.copy(inputStream, out); return tempFile; } }
jgeohash-geoip/src/main/java/de/alpharogroup/jgeohash/geoip/LookupServiceSingleton.java
package de.alpharogroup.jgeohash.geoip; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import org.apache.commons.io.IOUtils; import org.apache.log4j.Logger; import com.maxmind.geoip.LookupService; import de.alpharogroup.lang.ClassExtensions; /** * The class {@link LookupServiceSingleton} is a singleton class for the {@link LookupService}. */ public final class LookupServiceSingleton { /** The Constant logger. */ private static final Logger LOGGER = Logger.getLogger(LookupServiceSingleton.class.getName()); /** The single instance of the {@link LookupService}. */ private static LookupService instance; /** The constant for the file name prefix. */ private static final String PREFIX = "GeoLiteCity"; /** The Constant for the file name suffix. */ private static final String SUFFIX = ".dat"; /** * Private constructor. */ private LookupServiceSingleton() { } /** * Gets the single instance of the {@link LookupService}. * * @return the single instance of the {@link LookupService}. */ public static synchronized LookupService getInstance() { if (instance == null) { synchronized (LookupServiceSingleton.class) { File fileLocation = null; final InputStream is = ClassExtensions.getResourceAsStream(PREFIX + SUFFIX); try { fileLocation = inputStreamToFile(is); instance = new LookupService(fileLocation, LookupService.GEOIP_MEMORY_CACHE); } catch (final IOException e) { LOGGER.error("IOException in the initialization of the LookupService.", e); } } } return instance; } /** * Creates a temporary file from the given {@link InputStream} object. Note: the created * temporary file from the given {@link InputStream} object will be deleted on finish of the * application. * * @param inputStream * the {@link InputStream} object * @return the temporary file from the given {@link InputStream} object * @throws IOException * Signals that an I/O exception has occurred. 
*/ private static File inputStreamToFile(final InputStream inputStream) throws IOException { final File tempFile = File.createTempFile(PREFIX, SUFFIX); tempFile.deleteOnExit(); final FileOutputStream out = new FileOutputStream(tempFile); IOUtils.copy(inputStream, out); return tempFile; } }
Added double check to singleton
jgeohash-geoip/src/main/java/de/alpharogroup/jgeohash/geoip/LookupServiceSingleton.java
Added double check to singleton
<ide><path>geohash-geoip/src/main/java/de/alpharogroup/jgeohash/geoip/LookupServiceSingleton.java <ide> private static final Logger LOGGER = Logger.getLogger(LookupServiceSingleton.class.getName()); <ide> <ide> /** The single instance of the {@link LookupService}. */ <del> private static LookupService instance; <add> private static volatile LookupService instance; <ide> <ide> /** The constant for the file name prefix. */ <ide> private static final String PREFIX = "GeoLiteCity"; <ide> * <ide> * @return the single instance of the {@link LookupService}. <ide> */ <del> public static synchronized LookupService getInstance() <add> public static LookupService getInstance() <ide> { <ide> if (instance == null) <ide> { <ide> synchronized (LookupServiceSingleton.class) <del> { <del> File fileLocation = null; <del> final InputStream is = ClassExtensions.getResourceAsStream(PREFIX + SUFFIX); <del> try <del> { <del> fileLocation = inputStreamToFile(is); <del> instance = new LookupService(fileLocation, LookupService.GEOIP_MEMORY_CACHE); <del> } <del> catch (final IOException e) <del> { <del> LOGGER.error("IOException in the initialization of the LookupService.", e); <add> { // double check... <add> if (instance == null) { <add> File fileLocation = null; <add> final InputStream is = ClassExtensions.getResourceAsStream(PREFIX + SUFFIX); <add> try <add> { <add> fileLocation = inputStreamToFile(is); <add> instance = new LookupService(fileLocation, LookupService.GEOIP_MEMORY_CACHE); <add> } <add> catch (final IOException e) <add> { <add> LOGGER.error("IOException in the initialization of the LookupService.", e); <add> } <ide> } <ide> } <ide> }
Java
lgpl-2.1
c9f6def47366e358abfd8fb2fb40315936ffbcae
0
phoenixctms/ctsms,phoenixctms/ctsms,phoenixctms/ctsms,phoenixctms/ctsms
// license-header java merge-point /** * This is only generated once! It will never be overwritten. * You can (and have to!) safely modify it by hand. * TEMPLATE: SpringServiceImpl.vsl in andromda-spring cartridge * MODEL CLASS: AndroMDAModel::ctsms::org.phoenixctms.ctsms::service::proband::ProbandService * STEREOTYPE: Service */ package org.phoenixctms.ctsms.service.proband; import java.awt.Dimension; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import javax.crypto.SecretKey; import org.hibernate.LockMode; import org.phoenixctms.ctsms.adapt.DiagnosisCollisionFinder; import org.phoenixctms.ctsms.adapt.InquiryValueCollisionFinder; import org.phoenixctms.ctsms.adapt.MaxCostTypesAdapter; import org.phoenixctms.ctsms.adapt.MedicationCollisionFinder; import org.phoenixctms.ctsms.adapt.ProbandAddressTypeTagAdapter; import org.phoenixctms.ctsms.adapt.ProbandContactDetailTypeTagAdapter; import org.phoenixctms.ctsms.adapt.ProbandStatusEntryCollisionFinder; import org.phoenixctms.ctsms.adapt.ProbandTagAdapter; import org.phoenixctms.ctsms.adapt.ProcedureCollisionFinder; import org.phoenixctms.ctsms.compare.InquiryValueOutVOComparator; import org.phoenixctms.ctsms.compare.ProbandStatusEntryIntervalComparator; import org.phoenixctms.ctsms.domain.AlphaId; import org.phoenixctms.ctsms.domain.AnimalContactParticulars; import org.phoenixctms.ctsms.domain.Asp; import org.phoenixctms.ctsms.domain.AspDao; import org.phoenixctms.ctsms.domain.AspSubstance; import org.phoenixctms.ctsms.domain.AspSubstanceDao; import org.phoenixctms.ctsms.domain.BankAccount; import org.phoenixctms.ctsms.domain.BankAccountDao; import org.phoenixctms.ctsms.domain.Department; import org.phoenixctms.ctsms.domain.Diagnosis; import org.phoenixctms.ctsms.domain.DiagnosisDao; import 
org.phoenixctms.ctsms.domain.File; import org.phoenixctms.ctsms.domain.FileDao; import org.phoenixctms.ctsms.domain.InputField; import org.phoenixctms.ctsms.domain.InputFieldDao; import org.phoenixctms.ctsms.domain.InputFieldValue; import org.phoenixctms.ctsms.domain.Inquiry; import org.phoenixctms.ctsms.domain.InquiryDao; import org.phoenixctms.ctsms.domain.InquiryValue; import org.phoenixctms.ctsms.domain.InquiryValueDao; import org.phoenixctms.ctsms.domain.InventoryBookingDao; import org.phoenixctms.ctsms.domain.JournalEntry; import org.phoenixctms.ctsms.domain.JournalEntryDao; import org.phoenixctms.ctsms.domain.MassMailRecipient; import org.phoenixctms.ctsms.domain.MassMailRecipientDao; import org.phoenixctms.ctsms.domain.Medication; import org.phoenixctms.ctsms.domain.MedicationDao; import org.phoenixctms.ctsms.domain.MimeType; import org.phoenixctms.ctsms.domain.MoneyTransfer; import org.phoenixctms.ctsms.domain.MoneyTransferDao; import org.phoenixctms.ctsms.domain.NotificationDao; import org.phoenixctms.ctsms.domain.OpsCode; import org.phoenixctms.ctsms.domain.PrivacyConsentStatusType; import org.phoenixctms.ctsms.domain.PrivacyConsentStatusTypeDao; import org.phoenixctms.ctsms.domain.Proband; import org.phoenixctms.ctsms.domain.ProbandAddress; import org.phoenixctms.ctsms.domain.ProbandAddressDao; import org.phoenixctms.ctsms.domain.ProbandCategory; import org.phoenixctms.ctsms.domain.ProbandContactDetailValue; import org.phoenixctms.ctsms.domain.ProbandContactDetailValueDao; import org.phoenixctms.ctsms.domain.ProbandContactParticulars; import org.phoenixctms.ctsms.domain.ProbandDao; import org.phoenixctms.ctsms.domain.ProbandGroup; import org.phoenixctms.ctsms.domain.ProbandGroupDao; import org.phoenixctms.ctsms.domain.ProbandListEntry; import org.phoenixctms.ctsms.domain.ProbandStatusEntry; import org.phoenixctms.ctsms.domain.ProbandStatusEntryDao; import org.phoenixctms.ctsms.domain.ProbandStatusType; import 
org.phoenixctms.ctsms.domain.ProbandTagValue; import org.phoenixctms.ctsms.domain.ProbandTagValueDao; import org.phoenixctms.ctsms.domain.Procedure; import org.phoenixctms.ctsms.domain.ProcedureDao; import org.phoenixctms.ctsms.domain.Staff; import org.phoenixctms.ctsms.domain.Trial; import org.phoenixctms.ctsms.domain.TrialDao; import org.phoenixctms.ctsms.domain.User; import org.phoenixctms.ctsms.domain.VisitScheduleItem; import org.phoenixctms.ctsms.domain.VisitScheduleItemDao; import org.phoenixctms.ctsms.enumeration.FileModule; import org.phoenixctms.ctsms.enumeration.JournalModule; import org.phoenixctms.ctsms.enumeration.PaymentMethod; import org.phoenixctms.ctsms.enumeration.Sex; import org.phoenixctms.ctsms.enumeration.VariablePeriod; import org.phoenixctms.ctsms.excel.VisitScheduleExcelWriter; import org.phoenixctms.ctsms.exception.ServiceException; import org.phoenixctms.ctsms.pdf.PDFImprinter; import org.phoenixctms.ctsms.pdf.ProbandLetterPDFPainter; import org.phoenixctms.ctsms.security.CipherText; import org.phoenixctms.ctsms.security.CryptoUtil; import org.phoenixctms.ctsms.security.reencrypt.ReEncrypter; import org.phoenixctms.ctsms.util.CheckIDUtil; import org.phoenixctms.ctsms.util.CommonUtil; import org.phoenixctms.ctsms.util.CoreUtil; import org.phoenixctms.ctsms.util.DefaultSettings; import org.phoenixctms.ctsms.util.L10nUtil; import org.phoenixctms.ctsms.util.L10nUtil.Locales; import org.phoenixctms.ctsms.util.ServiceExceptionCodes; import org.phoenixctms.ctsms.util.ServiceUtil; import org.phoenixctms.ctsms.util.SettingCodes; import org.phoenixctms.ctsms.util.Settings; import org.phoenixctms.ctsms.util.Settings.Bundle; import org.phoenixctms.ctsms.util.SystemMessageCodes; import org.phoenixctms.ctsms.util.date.DateCalc; import org.phoenixctms.ctsms.util.date.DateInterval; import org.phoenixctms.ctsms.vo.AuthenticationVO; import org.phoenixctms.ctsms.vo.BankAccountInVO; import org.phoenixctms.ctsms.vo.BankAccountOutVO; import 
org.phoenixctms.ctsms.vo.DiagnosisInVO; import org.phoenixctms.ctsms.vo.DiagnosisOutVO; import org.phoenixctms.ctsms.vo.InquiriesPDFVO; import org.phoenixctms.ctsms.vo.InquiryValueInVO; import org.phoenixctms.ctsms.vo.InquiryValueJsonVO; import org.phoenixctms.ctsms.vo.InquiryValueOutVO; import org.phoenixctms.ctsms.vo.InquiryValuesOutVO; import org.phoenixctms.ctsms.vo.InventoryBookingOutVO; import org.phoenixctms.ctsms.vo.MedicationInVO; import org.phoenixctms.ctsms.vo.MedicationOutVO; import org.phoenixctms.ctsms.vo.MoneyTransferInVO; import org.phoenixctms.ctsms.vo.MoneyTransferOutVO; import org.phoenixctms.ctsms.vo.PSFVO; import org.phoenixctms.ctsms.vo.ProbandAddressInVO; import org.phoenixctms.ctsms.vo.ProbandAddressOutVO; import org.phoenixctms.ctsms.vo.ProbandContactDetailValueInVO; import org.phoenixctms.ctsms.vo.ProbandContactDetailValueOutVO; import org.phoenixctms.ctsms.vo.ProbandGroupOutVO; import org.phoenixctms.ctsms.vo.ProbandImageInVO; import org.phoenixctms.ctsms.vo.ProbandImageOutVO; import org.phoenixctms.ctsms.vo.ProbandInVO; import org.phoenixctms.ctsms.vo.ProbandLetterPDFVO; import org.phoenixctms.ctsms.vo.ProbandOutVO; import org.phoenixctms.ctsms.vo.ProbandStatusEntryInVO; import org.phoenixctms.ctsms.vo.ProbandStatusEntryOutVO; import org.phoenixctms.ctsms.vo.ProbandTagValueInVO; import org.phoenixctms.ctsms.vo.ProbandTagValueOutVO; import org.phoenixctms.ctsms.vo.ProcedureInVO; import org.phoenixctms.ctsms.vo.ProcedureOutVO; import org.phoenixctms.ctsms.vo.ReimbursementsExcelVO; import org.phoenixctms.ctsms.vo.TrialOutVO; import org.phoenixctms.ctsms.vo.VisitScheduleExcelVO; import org.phoenixctms.ctsms.vo.VisitScheduleItemOutVO; import org.phoenixctms.ctsms.vocycle.ProbandReflexionGraph; /** * @see org.phoenixctms.ctsms.service.proband.ProbandService */ public class ProbandServiceImpl extends ProbandServiceBase { private static JournalEntry logSystemMessage(Proband proband, ProbandAddressOutVO addressVO, Timestamp now, User modified, 
String systemMessageCode, Object result, Object original, JournalEntryDao journalEntryDao) throws Exception { boolean journalEncrypted = CommonUtil.getUseJournalEncryption(JournalModule.PROBAND_JOURNAL, null); return journalEntryDao.addSystemMessage(proband, now, modified, systemMessageCode, journalEncrypted ? new Object[] { addressVO.getName() } : null, new Object[] { CoreUtil.getSystemMessageCommentContent(result, original, !journalEncrypted) }); } private static JournalEntry logSystemMessage(Trial trial, ProbandOutVO probandVO, Timestamp now, User modified, String systemMessageCode, Object result, Object original, JournalEntryDao journalEntryDao) throws Exception { boolean journalEncrypted = CommonUtil.getUseJournalEncryption(JournalModule.PROBAND_JOURNAL, null); return journalEntryDao.addSystemMessage(trial, now, modified, systemMessageCode, journalEncrypted ? new Object[] { CommonUtil.probandOutVOToString(probandVO) } : new Object[] { Long.toString(probandVO.getId()) }, new Object[] { CoreUtil.getSystemMessageCommentContent(result, original, !journalEncrypted) }); } private void addUpdateInquiryValue(InquiryValueInVO inquiryValueIn, Proband proband, Inquiry inquiry, Timestamp now, User user, boolean force, boolean logTrial, boolean logProband, ArrayList<InquiryValueOutVO> outInquiryValues, ArrayList<InquiryValueJsonVO> outJsInquiryValues) throws Exception { InquiryValueDao inquiryValueDao = this.getInquiryValueDao(); Long id = inquiryValueIn.getId(); InquiryValueOutVO result = null; InquiryValueJsonVO resultJs = null; JournalEntryDao journalEntryDao = this.getJournalEntryDao(); if (id == null) { if (inquiry.isDisabled()) { inquiryValueIn = ServiceUtil.createPresetInquiryInValue(inquiry, proband.getId(), this.getInputFieldSelectionSetValueDao()); } checkInquiryValueInput(inquiryValueIn, proband, inquiry); ServiceUtil.addAutocompleteSelectionSetValue(inquiry.getField(), inquiryValueIn.getTextValue(), now, user, this.getInputFieldSelectionSetValueDao(), 
journalEntryDao); InquiryValue inquiryValue = inquiryValueDao.inquiryValueInVOToEntity(inquiryValueIn); CoreUtil.modifyVersion(inquiryValue, now, user); InputFieldValue inputFieldValue = inquiryValue.getValue(); this.getInputFieldValueDao().create(inputFieldValue); inquiryValue = inquiryValueDao.create(inquiryValue); if (outInquiryValues != null || logTrial || logProband) { result = inquiryValueDao.toInquiryValueOutVO(inquiryValue); } if (outJsInquiryValues != null && !CommonUtil.isEmptyString(inquiry.getJsVariableName())) { resultJs = inquiryValueDao.toInquiryValueJsonVO(inquiryValue); } if (logProband) { ServiceUtil.logSystemMessage(proband, result.getInquiry().getTrial(), now, user, SystemMessageCodes.INQUIRY_VALUE_CREATED, result, null, journalEntryDao); } if (logTrial) { ServiceUtil.logSystemMessage(inquiry.getTrial(), result.getProband(), now, user, SystemMessageCodes.INQUIRY_VALUE_CREATED, result, null, journalEntryDao); } } else { InquiryValue originalInquiryValue = CheckIDUtil.checkInquiryValueId(id, inquiryValueDao); if (!inquiry.isDisabled() && !ServiceUtil.inquiryValueEquals(inquiryValueIn, originalInquiryValue.getValue(), force)) { checkInquiryValueInput(inquiryValueIn, proband, inquiry); ServiceUtil.addAutocompleteSelectionSetValue(inquiry.getField(), inquiryValueIn.getTextValue(), now, user, this.getInputFieldSelectionSetValueDao(), journalEntryDao); InquiryValueOutVO original = null; if (logProband || logTrial) { original = inquiryValueDao.toInquiryValueOutVO(originalInquiryValue); } inquiryValueDao.evict(originalInquiryValue); InquiryValue inquiryValue = inquiryValueDao.inquiryValueInVOToEntity(inquiryValueIn); CoreUtil.modifyVersion(originalInquiryValue, inquiryValue, now, user); inquiryValueDao.update(inquiryValue); if (outInquiryValues != null || logTrial || logProband) { result = inquiryValueDao.toInquiryValueOutVO(inquiryValue); } if (outJsInquiryValues != null && !CommonUtil.isEmptyString(inquiry.getJsVariableName())) { resultJs = 
inquiryValueDao.toInquiryValueJsonVO(inquiryValue); } if (logProband) { ServiceUtil.logSystemMessage(proband, result.getInquiry().getTrial(), now, user, SystemMessageCodes.INQUIRY_VALUE_UPDATED, result, original, journalEntryDao); } if (logTrial) { ServiceUtil .logSystemMessage(inquiry.getTrial(), result.getProband(), now, user, SystemMessageCodes.INQUIRY_VALUE_UPDATED, result, original, journalEntryDao); } } else { if (outInquiryValues != null) { result = inquiryValueDao.toInquiryValueOutVO(originalInquiryValue); } if (outJsInquiryValues != null && !CommonUtil.isEmptyString(inquiry.getJsVariableName())) { resultJs = inquiryValueDao.toInquiryValueJsonVO(originalInquiryValue); } } } if (outInquiryValues != null) { outInquiryValues.add(result); } if (resultJs != null) { outJsInquiryValues.add(resultJs); } } private void checkBankAccountInput(BankAccountInVO bankAccountIn) throws ServiceException { ProbandDao probandDao = this.getProbandDao(); // referential checks Proband proband = CheckIDUtil.checkProbandId(bankAccountIn.getProbandId(), probandDao); if (!probandDao.toProbandOutVO(proband).isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } if (!proband.isPerson()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_ACCOUNT_PROBAND_NOT_PERSON); } ServiceUtil.checkProbandLocked(proband); String iban = bankAccountIn.getIban(); String bic = bankAccountIn.getBic(); String accountNumber = bankAccountIn.getAccountNumber(); String bankCodeNumber = bankAccountIn.getBankCodeNumber(); if (bankAccountIn.getNa()) { if (bankAccountIn.getAccountHolderName() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.ACCOUNT_HOLDER_NAME_NOT_NULL); } if (bankAccountIn.getBankName() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_NAME_NOT_NULL); } if (iban != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.IBAN_NOT_NULL); } if (bic != null) { throw 
L10nUtil.initServiceException(ServiceExceptionCodes.BIC_NOT_NULL); } if (accountNumber != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.ACCOUNT_NUMBER_NOT_NULL); } if (bankCodeNumber != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_CODE_NUMBER_NOT_NULL); } } else { if (CommonUtil.isEmptyString(bankAccountIn.getAccountHolderName())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.ACCOUNT_HOLDER_NAME_REQUIRED); } if (CommonUtil.isEmptyString(iban) != CommonUtil.isEmptyString(bic)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_ACCOUNT_IBAN_AND_BIC_REQUIRED); } if (!CommonUtil.isEmptyString(iban) && !CommonUtil.checkIban(iban)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.INVALID_IBAN); } if (!CommonUtil.isEmptyString(bic) && !CommonUtil.checkBic(bic)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.INVALID_BIC); } if (CommonUtil.isEmptyString(accountNumber) != CommonUtil.isEmptyString(bankCodeNumber)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_ACCOUNT_ACCOUNT_NUMBER_AND_BANK_CODE_NUMBER_REQUIRED); } if (CommonUtil.isEmptyString(iban) && CommonUtil.isEmptyString(accountNumber)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.IBAN_OR_BANK_ACCOUNT_ACCOUNT_NUMBER_REQUIRED); } } } private void checkDiagnosisInput(DiagnosisInVO diagnosisIn) throws ServiceException { ProbandDao probandDao = this.getProbandDao(); // referential checks Proband proband = CheckIDUtil.checkProbandId(diagnosisIn.getProbandId(), probandDao); AlphaId alphaId = CheckIDUtil.checkAlphaIdId(diagnosisIn.getCodeId(), this.getAlphaIdDao()); if (!probandDao.toProbandOutVO(proband).isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } ServiceUtil.checkProbandLocked(proband); if (diagnosisIn.getStart() == null && diagnosisIn.getStop() != null) { throw 
L10nUtil.initServiceException(ServiceExceptionCodes.DIAGNOSIS_START_DATE_REQUIRED); } // other input checks if (diagnosisIn.getStart() != null && diagnosisIn.getStop() != null && diagnosisIn.getStop().compareTo(diagnosisIn.getStart()) <= 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.DIAGNOSIS_END_DATE_LESS_THAN_OR_EQUAL_TO_START_DATE); } if ((new DiagnosisCollisionFinder(probandDao, this.getDiagnosisDao())).collides(diagnosisIn)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.DIAGNOSIS_OVERLAPPING); } } private void checkInquiryValueInput(InquiryValueInVO inquiryValueIn, Proband proband, Inquiry inquiry) throws ServiceException { InputFieldDao inputFieldDao = this.getInputFieldDao(); InputField inputField = inquiry.getField(); inputFieldDao.lock(inputField, LockMode.PESSIMISTIC_WRITE); ServiceUtil.checkInputFieldTextValue(inputField, inquiry.isOptional(), inquiryValueIn.getTextValue(), inputFieldDao, this.getInputFieldSelectionSetValueDao()); ServiceUtil.checkInputFieldBooleanValue(inputField, inquiry.isOptional(), inquiryValueIn.getBooleanValue(), inputFieldDao); ServiceUtil.checkInputFieldLongValue(inputField, inquiry.isOptional(), inquiryValueIn.getLongValue(), inputFieldDao); ServiceUtil.checkInputFieldFloatValue(inputField, inquiry.isOptional(), inquiryValueIn.getFloatValue(), inputFieldDao); ServiceUtil.checkInputFieldDateValue(inputField, inquiry.isOptional(), inquiryValueIn.getDateValue(), inputFieldDao); ServiceUtil.checkInputFieldTimeValue(inputField, inquiry.isOptional(), inquiryValueIn.getTimeValue(), inputFieldDao); ServiceUtil.checkInputFieldTimestampValue(inputField, inquiry.isOptional(), inquiryValueIn.getTimestampValue(), inputFieldDao); ServiceUtil.checkInputFieldInkValue(inputField, inquiry.isOptional(), inquiryValueIn.getInkValues(), inputFieldDao); ServiceUtil.checkInputFieldSelectionSetValues(inputField, inquiry.isOptional(), inquiryValueIn.getSelectionValueIds(), inputFieldDao, 
this.getInputFieldSelectionSetValueDao()); if ((new InquiryValueCollisionFinder(this.getProbandDao(), this.getInquiryValueDao())).collides(inquiryValueIn)) { throw L10nUtil .initServiceException(ServiceExceptionCodes.INQUIRY_VALUE_ALREADY_EXISTS, CommonUtil.inputFieldOutVOToString(inputFieldDao.toInputFieldOutVO(inputField))); } } private void checkMedicationInput(MedicationInVO medicationIn) throws ServiceException { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(medicationIn.getProbandId(), probandDao, LockMode.PESSIMISTIC_WRITE); if (!probandDao.toProbandOutVO(proband).isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } ServiceUtil.checkProbandLocked(proband); AspDao aspDao = this.getAspDao(); Asp asp = null; if (medicationIn.getAspId() != null) { asp = CheckIDUtil.checkAspId(medicationIn.getAspId(), aspDao); } AspSubstanceDao aspSubstanceDao = this.getAspSubstanceDao(); Collection<Long> substanceIds = medicationIn.getSubstanceIds(); if (substanceIds != null && substanceIds.size() > 0) { Iterator<Long> it = substanceIds.iterator(); HashSet<Long> dupeCheck = new HashSet<Long>(substanceIds.size()); HashSet<Long> aspSubstanceIds; Collection<AspSubstance> aspSubstances; if (asp != null && ((aspSubstances = asp.getSubstances()) != null) && aspSubstances.size() > 0) { aspSubstanceIds = new HashSet<Long>(aspSubstances.size()); Iterator<AspSubstance> aspSubstancesIt = aspSubstances.iterator(); while (aspSubstancesIt.hasNext()) { aspSubstanceIds.add(aspSubstancesIt.next().getId()); } } else { aspSubstanceIds = new HashSet<Long>(); } while (it.hasNext()) { Long id = it.next(); if (id == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_SUBSTANCE_ID_IS_NULL); } AspSubstance substance = CheckIDUtil.checkAspSubstanceId(id, aspSubstanceDao); if (!dupeCheck.add(substance.getId())) { throw 
L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_DUPLICATE_SUBSTANCE, aspSubstanceDao.toAspSubstanceVO(substance).getName()); } if (asp != null && !aspSubstanceIds.remove(id)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_SUBSTANCE_NOT_CONTAINED, aspDao.toAspVO(asp).getName(), aspSubstanceDao.toAspSubstanceVO(substance).getName()); } } if (asp != null && aspSubstanceIds.size() > 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_SUBSTANCE_MISSING, aspDao.toAspVO(asp).getName(), ServiceUtil.aspSubstanceIDsToString(aspSubstanceIds, this.getAspSubstanceDao())); } } else { if (asp == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_SUBSTANCES_REQUIRED); } } Diagnosis diagnosis = null; if (medicationIn.getDiagnosisId() != null) { diagnosis = CheckIDUtil.checkDiagnosisId(medicationIn.getDiagnosisId(), this.getDiagnosisDao()); if (!proband.equals(diagnosis.getProband())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_WRONG_DIAGNOSIS, proband.getId().toString()); } } Procedure procedure = null; if (medicationIn.getProcedureId() != null) { procedure = CheckIDUtil.checkProcedureId(medicationIn.getProcedureId(), this.getProcedureDao()); if (!proband.equals(procedure.getProband())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_WRONG_PROCEDURE, proband.getId().toString()); } } if (medicationIn.getDoseValue() != null) { if (medicationIn.getDoseValue() <= 0.0f) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_DOSE_VALUE_LESS_THAN_OR_EQUAL_ZERO); } if (CommonUtil.isEmptyString(medicationIn.getDoseUnit())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_DOSE_UNIT_REQUIRED); } } else { if (medicationIn.getDoseUnit() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_DOSE_UNIT_NOT_NULL); } } if (medicationIn.getStart() == null && medicationIn.getStop() != null) { throw 
L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_START_DATE_REQUIRED); } // other input checks if (medicationIn.getStart() != null && medicationIn.getStop() != null && medicationIn.getStop().compareTo(medicationIn.getStart()) <= 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_END_DATE_LESS_THAN_OR_EQUAL_TO_START_DATE); } if ((new MedicationCollisionFinder(probandDao, this.getMedicationDao())).collides(medicationIn)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_OVERLAPPING); } } private void checkMoneyTransferInput(MoneyTransferInVO moneyTransferIn, Long maxAllowedCostTypes) throws ServiceException { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(moneyTransferIn.getProbandId(), probandDao, LockMode.PESSIMISTIC_WRITE); if (!probandDao.toProbandOutVO(proband).isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } if (!proband.isPerson()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PROBAND_NOT_PERSON); } ServiceUtil.checkProbandLocked(proband); if (moneyTransferIn.getTrialId() != null) { Trial trial = CheckIDUtil.checkTrialId(moneyTransferIn.getTrialId(), this.getTrialDao()); ServiceUtil.checkTrialLocked(trial); (new MaxCostTypesAdapter(maxAllowedCostTypes, this.getTrialDao(), this.getMoneyTransferDao())).checkCategoryInput(moneyTransferIn); } BankAccount bankAccount = null; if (moneyTransferIn.getBankAccountId() != null) { bankAccount = CheckIDUtil.checkBankAccountId(moneyTransferIn.getBankAccountId(), this.getBankAccountDao()); if (!proband.equals(bankAccount.getProband())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_WRONG_BANK_ACCOUNT, proband.getId().toString()); } } if (PaymentMethod.WIRE_TRANSFER.equals(moneyTransferIn.getMethod())) { if (bankAccount == null) { throw 
L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_BANK_ACCOUNT_REQUIRED); } } else { if (bankAccount != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_BANK_ACCOUNT_NOT_NULL); } if (moneyTransferIn.getReasonForPayment() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_REASON_FORM_PAYMENT_NOT_NULL); } if (moneyTransferIn.getReference() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_REFERENCE_NOT_NULL); } } if (PaymentMethod.VOUCHER.equals(moneyTransferIn.getMethod())) { if (moneyTransferIn.getAmount() < 0.0f) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_AMOUNT_NEGATIVE); } } else { if (moneyTransferIn.getVoucherCode() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_VOUCHER_CODE_NOT_NULL); } } if (moneyTransferIn.getShowComment() && CommonUtil.isEmptyString(moneyTransferIn.getComment())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_COMMENT_REQUIRED); } } private void checkParents(ProbandInVO probandIn, Proband child) throws ServiceException { Iterator<Proband> parentsIt = child.getParents().iterator(); int parentCount = 0; HashSet<Sex> parentGenders = new HashSet<Sex>(Sex.literals().size()); boolean isParent = false; while (parentsIt.hasNext()) { Proband parent = parentsIt.next(); if (parent.getId().equals(probandIn.getId())) { isParent = true; break; } if (parent.isPerson()) { ProbandContactParticulars personParticlars = parent.getPersonParticulars(); if (personParticlars != null && personParticlars.getGender() != null) { parentGenders.add(personParticlars.getGender()); } } else { AnimalContactParticulars animalParticlars = parent.getAnimalParticulars(); if (animalParticlars != null && animalParticlars.getGender() != null) { parentGenders.add(animalParticlars.getGender()); } } parentCount++; } if (!isParent) { ProbandDao probandDao = 
this.getProbandDao(); // NOTE(review): this local (probandDao, assigned on the previous line) is not read anywhere below — looks removable; confirm against the full method.
if (parentCount >= 2) {
	// A proband may have at most two parents.
	throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CHILD_TWO_PARENTS, child.getId().toString());
}
if (probandIn.getGender() != null && !parentGenders.add(probandIn.getGender())) {
	// parentGenders already holds the existing parents' sexes; a duplicate means two parents of the same sex.
	throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CHILD_PARENT_WITH_SAME_SEX, child.getId().toString(),
			L10nUtil.getSexName(Locales.USER, probandIn.getGender().name()));
}
}
}

/**
 * Validates a proband address input via the address-type tag adapter.
 *
 * @param addressIn the address input VO to validate
 * @throws ServiceException if the adapter rejects the tag value input
 */
private void checkProbandAddressInput(ProbandAddressInVO addressIn) throws ServiceException {
	(new ProbandAddressTypeTagAdapter(this.getProbandDao(), this.getAddressTypeDao())).checkTagValueInput(addressIn);
}

/**
 * Validates a proband contact detail value input via the contact-detail-type tag adapter.
 *
 * @param contactValueIn the contact detail value input VO to validate
 * @throws ServiceException if the adapter rejects the tag value input
 */
private void checkProbandContactDetailValueInput(ProbandContactDetailValueInVO contactValueIn) throws ServiceException {
	(new ProbandContactDetailTypeTagAdapter(this.getProbandDao(), this.getContactDetailTypeDao())).checkTagValueInput(contactValueIn);
}

/**
 * Validates an uploaded proband image: configurable byte-size limit, MIME type
 * (must be registered for the PROBAND_IMAGE file module and flagged as an image)
 * and minimum pixel dimensions. A no-op when no image data is supplied.
 *
 * @param probandImage the image input VO to validate
 * @throws ServiceException if any check fails
 */
private void checkProbandImageInput(ProbandImageInVO probandImage) throws ServiceException {
	if (probandImage.getDatas() != null && probandImage.getDatas().length > 0) {
		// Optional size cap from settings; null disables the check.
		Integer probandImageSizeLimit = Settings.getIntNullable(SettingCodes.PROBAND_IMAGE_SIZE_LIMIT, Bundle.SETTINGS,
				DefaultSettings.PROBAND_IMAGE_SIZE_LIMIT);
		if (probandImageSizeLimit != null && probandImage.getDatas().length > probandImageSizeLimit) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_SIZE_LIMIT_EXCEEDED,
					CommonUtil.humanReadableByteCount(probandImageSizeLimit));
		}
		if (probandImage.getMimeType() == null) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_MIME_TYPE_REQUIRED);
		}
		// The declared MIME type must be known for the PROBAND_IMAGE module ...
		Iterator<MimeType> it = this.getMimeTypeDao().findByMimeTypeModule(probandImage.getMimeType(), FileModule.PROBAND_IMAGE).iterator();
		if (!it.hasNext()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_MIME_TYPE_UNKNOWN, probandImage.getMimeType());
		}
		// ... and flagged as an image type.
		if (!it.next().isImage()) { throw
L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_MIME_TYPE_NO_IMAGE, probandImage.getMimeType()); } Dimension imageDimension = CoreUtil.getImageDimension(probandImage.getDatas()); if (imageDimension == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_CANNOT_READ_DIMENSIONS); } else { Integer probandImageMinWidth = Settings.getIntNullable(SettingCodes.PROBAND_IMAGE_MIN_WIDTH, Bundle.SETTINGS, DefaultSettings.PROBAND_IMAGE_MIN_WIDTH); Integer probandImageMinHeight = Settings.getIntNullable(SettingCodes.PROBAND_IMAGE_MIN_HEIGHT, Bundle.SETTINGS, DefaultSettings.PROBAND_IMAGE_MIN_HEIGHT); if (probandImageMinWidth != null && imageDimension.getWidth() < (double) probandImageMinWidth) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_WIDTH_LESS_THAN_LIMIT, probandImageMinWidth); } if (probandImageMinHeight != null && imageDimension.getHeight() < (double) probandImageMinHeight) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_HEIGHT_LESS_THAN_LIMIT, probandImageMinHeight); } } } } private void checkProbandInput(ProbandInVO probandIn) throws ServiceException { // referential checks CheckIDUtil.checkDepartmentId(probandIn.getDepartmentId(), this.getDepartmentDao()); ProbandCategory category = CheckIDUtil.checkProbandCategoryId(probandIn.getCategoryId(), this.getProbandCategoryDao()); if (probandIn.getPhysicianId() != null) { CheckIDUtil.checkStaffId(probandIn.getPhysicianId(), this.getStaffDao()); } if (probandIn.getChildIds() != null && probandIn.getChildIds().size() > 0) { ProbandDao probandDao = this.getProbandDao(); ArrayList<Long> childIds = new ArrayList<Long>(probandIn.getChildIds()); Collections.sort(childIds); Iterator<Long> it = childIds.iterator(); HashSet<Long> dupeCheck = new HashSet<Long>(childIds.size()); while (it.hasNext()) { Long id = it.next(); if (id == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CHILD_NULL); } Proband child = 
CheckIDUtil.checkProbandId(id, probandDao, LockMode.PESSIMISTIC_WRITE); if (!dupeCheck.add(child.getId())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.DUPLICATE_PROBAND_CHILD, child.getId().toString()); } checkParents(probandIn, child); } } // other input checks if (probandIn.isPerson()) { if (!category.isPerson()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CATEGORY_NOT_FOR_PERSON_ENTRIES, L10nUtil.getProbandCategoryName(Locales.USER, category.getNameL10nKey())); } if (!probandIn.isBlinded()) { if (CommonUtil.isEmptyString(probandIn.getFirstName())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_FIRST_NAME_REQUIRED); } if (CommonUtil.isEmptyString(probandIn.getLastName())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_LAST_NAME_REQUIRED); } if (probandIn.getDateOfBirth() == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_REQUIRED); } else if (DateCalc.getStartOfDay(probandIn.getDateOfBirth()).compareTo(new Date()) > 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_IN_THE_FUTURE); } if (probandIn.getGender() == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_GENDER_REQUIRED); } if (probandIn.getAlias() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_ALIAS_NOT_NULL); } } else { if (probandIn.getPrefixedTitle1() != null || probandIn.getPrefixedTitle2() != null || probandIn.getPrefixedTitle3() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_PREFIXED_TITLES_NOT_NULL); } if (probandIn.getFirstName() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_FIRST_NAME_NOT_NULL); } if (probandIn.getLastName() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_LAST_NAME_NOT_NULL); } if (probandIn.getDateOfBirth() != null && DateCalc.getStartOfDay(probandIn.getDateOfBirth()).compareTo(new 
Date()) > 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_IN_THE_FUTURE); } if (probandIn.getPostpositionedTitle1() != null || probandIn.getPostpositionedTitle2() != null || probandIn.getPostpositionedTitle3() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_POSTPOSITIONED_TITLES_NOT_NULL); } if (probandIn.getCitizenship() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CITIZENSHIP_NOT_NULL); } } if (probandIn.getAnimalName() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.ANIMAL_NAME_NOT_NULL); } } else { if (!category.isAnimal()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CATEGORY_NOT_FOR_ANIMAL_ENTRIES, L10nUtil.getProbandCategoryName(Locales.USER, category.getNameL10nKey())); } if (!probandIn.isBlinded()) { if (CommonUtil.isEmptyString(probandIn.getAnimalName())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.ANIMAL_NAME_REQUIRED); } if (probandIn.getDateOfBirth() == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_REQUIRED); } else if (DateCalc.getStartOfDay(probandIn.getDateOfBirth()).compareTo(new Date()) > 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_IN_THE_FUTURE); } if (probandIn.getGender() == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_GENDER_REQUIRED); } if (probandIn.getAlias() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_ALIAS_NOT_NULL); } } else { if (probandIn.getAnimalName() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.ANIMAL_NAME_NOT_NULL); } if (probandIn.getDateOfBirth() != null && DateCalc.getStartOfDay(probandIn.getDateOfBirth()).compareTo(new Date()) > 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_IN_THE_FUTURE); } } if (probandIn.getPrefixedTitle1() != null || probandIn.getPrefixedTitle2() 
!= null || probandIn.getPrefixedTitle3() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_PREFIXED_TITLES_NOT_NULL); } if (probandIn.getFirstName() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_FIRST_NAME_NOT_NULL); } if (probandIn.getLastName() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_LAST_NAME_NOT_NULL); } if (probandIn.getPostpositionedTitle1() != null || probandIn.getPostpositionedTitle2() != null || probandIn.getPostpositionedTitle3() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_POSTPOSITIONED_TITLES_NOT_NULL); } if (probandIn.getCitizenship() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CITIZENSHIP_NOT_NULL); } } if (probandIn.getRatingMax() != null) { if (probandIn.getRatingMax() <= 0l) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_RATING_MAX_LESS_THAN_OR_EQUAL_ZERO); } else if (probandIn.getRating() == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_RATING_REQUIRED); } else { if (probandIn.getRating() < 0l) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_RATING_LESS_THAN_ZERO); } else if (probandIn.getRating() > probandIn.getRatingMax()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_RATING_GREATER_THAN_RATING_MAX); } } } else if (probandIn.getRating() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_RATING_NOT_NULL); } } private void checkProbandLoop(Proband proband) throws ServiceException { (new ProbandReflexionGraph(this.getProbandDao())).checkGraphLoop(proband, false, true); } private void checkProbandStatusEntryInput(ProbandStatusEntryInVO statusEntryIn) throws ServiceException { ProbandDao probandDao = this.getProbandDao(); // referential checks Proband proband = CheckIDUtil.checkProbandId(statusEntryIn.getProbandId(), probandDao); ProbandStatusType statusType = 
CheckIDUtil.checkProbandStatusTypeId(statusEntryIn.getTypeId(), this.getProbandStatusTypeDao());
		// status entries of a proband the current user cannot decrypt must not be modified
		if (!probandDao.toProbandOutVO(proband).isDecrypted()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
		}
		ServiceUtil.checkProbandLocked(proband);
		// the status type must match the proband kind (person vs. animal)
		if (proband.isPerson() && !statusType.isPerson()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_STATUS_NOT_FOR_PERSON_ENTRIES,
					L10nUtil.getProbandStatusTypeName(Locales.USER, statusType.getNameL10nKey()));
		}
		if (!proband.isPerson() && !statusType.isAnimal()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_STATUS_NOT_FOR_ANIMAL_ENTRIES,
					L10nUtil.getProbandStatusTypeName(Locales.USER, statusType.getNameL10nKey()));
		}
		// other input checks
		// an open-ended interval (stop == null) is allowed; otherwise stop must be strictly after start
		if (statusEntryIn.getStop() != null && statusEntryIn.getStop().compareTo(statusEntryIn.getStart()) <= 0) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_STATUS_ENTRY_END_DATE_LESS_THAN_OR_EQUAL_TO_START_DATE);
		}
		// reject intervals that overlap an existing status entry of the same proband
		if ((new ProbandStatusEntryCollisionFinder(probandDao, this.getProbandStatusEntryDao())).collides(statusEntryIn)) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_STATUS_ENTRY_OVERLAPPING);
		}
	}

	// Delegates proband tag value validation to the ProbandTagAdapter.
	private void checkProbandTagValueInput(ProbandTagValueInVO tagValueIn) throws ServiceException {
		(new ProbandTagAdapter(this.getProbandDao(), this.getProbandTagDao())).checkTagValueInput(tagValueIn);
	}

	// Validates a procedure input VO: the referenced proband and OPS code must exist,
	// the proband must be decryptable for the current user and unlocked, and the
	// date interval must be well-formed and non-overlapping (checks continue below).
	private void checkProcedureInput(ProcedureInVO procedureIn) throws ServiceException {
		ProbandDao probandDao = this.getProbandDao();
		// referential checks
		Proband proband = CheckIDUtil.checkProbandId(procedureIn.getProbandId(), probandDao);
		OpsCode opsCode = CheckIDUtil.checkOpsCodeId(procedureIn.getCodeId(), this.getOpsCodeDao());
		if (!probandDao.toProbandOutVO(proband).isDecrypted()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
		}
		ServiceUtil.checkProbandLocked(proband);
		// a stop date without a start date is rejected
		if (procedureIn.getStart() == null && procedureIn.getStop() != null) {
throw L10nUtil.initServiceException(ServiceExceptionCodes.PROCEDURE_START_DATE_REQUIRED); } // other input checks if (procedureIn.getStart() != null && procedureIn.getStop() != null && procedureIn.getStop().compareTo(procedureIn.getStart()) <= 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROCEDURE_END_DATE_LESS_THAN_OR_EQUAL_TO_START_DATE); } if ((new ProcedureCollisionFinder(probandDao, this.getProcedureDao())).collides(procedureIn)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROCEDURE_OVERLAPPING); } } private InquiryValuesOutVO getInquiryValues(Trial trial, String category, ProbandOutVO probandVO, Boolean active, Boolean activeSignup, boolean jsValues, boolean loadAllJsValues, boolean sort, PSFVO psf) throws Exception { InquiryValueDao inquiryValueDao = this.getInquiryValueDao(); InquiryValuesOutVO result = new InquiryValuesOutVO(); Collection<Map> inquiryValues = inquiryValueDao.findByProbandTrialCategoryActiveJs(probandVO.getId(), trial.getId(), category, active, activeSignup, sort, null, psf); result.setPageValues(ServiceUtil.getInquiryValues(probandVO, inquiryValues, null, this.getInquiryDao(), inquiryValueDao)); if (jsValues) { if (loadAllJsValues) { result.setJsValues(ServiceUtil.getInquiryJsonValues( inquiryValueDao.findByProbandTrialActiveJs(probandVO.getId(), trial.getId(), active, activeSignup, sort, true, null), false, inquiryValueDao, this.getInputFieldSelectionSetValueDao())); } else { result.setJsValues(ServiceUtil.getInquiryJsonValues(inquiryValues, true, inquiryValueDao, this.getInputFieldSelectionSetValueDao())); } } return result; } @Override protected BankAccountOutVO handleAddBankAccount( AuthenticationVO auth, BankAccountInVO newBankAccount) throws Exception { checkBankAccountInput(newBankAccount); BankAccountDao bankAccountDao = this.getBankAccountDao(); BankAccount bankAccount = bankAccountDao.bankAccountInVOToEntity(newBankAccount); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = 
CoreUtil.getUser();
		// stamp version/modification metadata, persist, and journal the creation
		CoreUtil.modifyVersion(bankAccount, now, user);
		bankAccount = bankAccountDao.create(bankAccount);
		BankAccountOutVO result = bankAccountDao.toBankAccountOutVO(bankAccount);
		ServiceUtil.logSystemMessage(bankAccount.getProband(), result.getProband(), now, user, SystemMessageCodes.BANK_ACCOUNT_CREATED, result, null,
				this.getJournalEntryDao());
		return result;
	}

	/**
	 * Creates a new diagnosis for a proband after validating the input,
	 * and writes a journal entry for the creation.
	 *
	 * @param auth the authentication credentials of the calling user
	 * @param newDiagnosis the diagnosis input VO to persist
	 * @return the output VO of the newly created diagnosis
	 * @throws Exception if validation (checkDiagnosisInput) or persistence fails
	 */
	@Override
	protected DiagnosisOutVO handleAddDiagnosis(AuthenticationVO auth, DiagnosisInVO newDiagnosis) throws Exception {
		checkDiagnosisInput(newDiagnosis);
		DiagnosisDao diagnosisDao = this.getDiagnosisDao();
		Diagnosis diagnosis = diagnosisDao.diagnosisInVOToEntity(newDiagnosis);
		Timestamp now = new Timestamp(System.currentTimeMillis());
		User user = CoreUtil.getUser();
		// stamp version/modification metadata before persisting
		CoreUtil.modifyVersion(diagnosis, now, user);
		diagnosis = diagnosisDao.create(diagnosis);
		DiagnosisOutVO result = diagnosisDao.toDiagnosisOutVO(diagnosis);
		ServiceUtil.logSystemMessage(diagnosis.getProband(), result.getProband(), now, user, SystemMessageCodes.DIAGNOSIS_CREATED, result, null,
				this.getJournalEntryDao());
		return result;
	}

	/**
	 * Creates a new medication record for a proband after validating the input,
	 * and writes a journal entry for the creation.
	 *
	 * @param auth the authentication credentials of the calling user
	 * @param newMedication the medication input VO to persist
	 * @return the output VO of the newly created medication
	 * @throws Exception if validation (checkMedicationInput) or persistence fails
	 */
	@Override
	protected MedicationOutVO handleAddMedication(AuthenticationVO auth, MedicationInVO newMedication) throws Exception {
		checkMedicationInput(newMedication);
		MedicationDao medicationDao = this.getMedicationDao();
		Medication medication = medicationDao.medicationInVOToEntity(newMedication);
		Timestamp now = new Timestamp(System.currentTimeMillis());
		User user = CoreUtil.getUser();
		CoreUtil.modifyVersion(medication, now, user);
		medication = medicationDao.create(medication);
		MedicationOutVO result = medicationDao.toMedicationOutVO(medication);
		ServiceUtil.logSystemMessage(medication.getProband(), result.getProband(), now, user, SystemMessageCodes.MEDICATION_CREATED, result, null,
				this.getJournalEntryDao());
		return result;
	}

	@Override
	protected MoneyTransferOutVO handleAddMoneyTransfer(
			AuthenticationVO auth, MoneyTransferInVO newMoneyTransfer, Long maxAllowedCostTypes) throws Exception {
checkMoneyTransferInput(newMoneyTransfer, maxAllowedCostTypes); MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao(); MoneyTransfer moneyTransfer = moneyTransferDao.moneyTransferInVOToEntity(newMoneyTransfer); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); CoreUtil.modifyVersion(moneyTransfer, now, user); moneyTransfer = moneyTransferDao.create(moneyTransfer); Trial trial = moneyTransfer.getTrial(); MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer); if (trial != null) { logSystemMessage(trial, result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_CREATED, result, null, this.getJournalEntryDao()); } ServiceUtil .logSystemMessage(moneyTransfer.getProband(), result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_CREATED, result, null, this.getJournalEntryDao()); return result; } @Override protected ProbandOutVO handleAddProband(AuthenticationVO auth, ProbandInVO newProband, Integer maxInstances, Integer maxParentsDepth, Integer maxChildrenDepth) throws Exception { checkProbandInput(newProband); User user = CoreUtil.getUser(); this.getUserDao().lock(user, LockMode.PESSIMISTIC_WRITE); if (!user.getDepartment().getId().equals(newProband.getDepartmentId())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_NOT_EQUAL_TO_USER_DEPARTMENT); } Timestamp now = new Timestamp(System.currentTimeMillis()); Proband proband = ServiceUtil.createProband(newProband, now, user, this.getProbandDao(), this.getPrivacyConsentStatusTypeDao(), this.getProbandContactParticularsDao(), this.getAnimalContactParticularsDao(), this.getNotificationDao()); ProbandOutVO result = this.getProbandDao().toProbandOutVO(proband, maxInstances, maxParentsDepth, maxChildrenDepth); JournalEntryDao journalEntryDao = this.getJournalEntryDao(); ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_CREATED, result, null, journalEntryDao); Staff 
physician = proband.getPhysician(); if (physician != null) { ServiceUtil.logSystemMessage(physician, result, now, user, SystemMessageCodes.PROBAND_CREATED, result, null, journalEntryDao); } return result; } @Override protected ProbandAddressOutVO handleAddProbandAddress( AuthenticationVO auth, ProbandAddressInVO newProbandAddress) throws Exception { checkProbandAddressInput(newProbandAddress); ProbandAddressDao addressDao = this.getProbandAddressDao(); ProbandAddress address = addressDao.probandAddressInVOToEntity(newProbandAddress); if (addressDao.getCount(address.getProband().getId(), null, null, true) == 0) { address.setWireTransfer(true); } Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); CoreUtil.modifyVersion(address, now, user); address = addressDao.create(address); ProbandAddressOutVO result = addressDao.toProbandAddressOutVO(address); ServiceUtil.logSystemMessage(address.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_ADDRESS_CREATED, result, null, this.getJournalEntryDao()); return result; } @Override protected ProbandContactDetailValueOutVO handleAddProbandContactDetailValue( AuthenticationVO auth, ProbandContactDetailValueInVO newProbandContactDetailValue) throws Exception { checkProbandContactDetailValueInput(newProbandContactDetailValue); ProbandContactDetailValueDao contactValueDao = this.getProbandContactDetailValueDao(); ProbandContactDetailValue contactValue = contactValueDao.probandContactDetailValueInVOToEntity(newProbandContactDetailValue); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); CoreUtil.modifyVersion(contactValue, now, user); contactValue = contactValueDao.create(contactValue); ProbandContactDetailValueOutVO result = contactValueDao.toProbandContactDetailValueOutVO(contactValue); ServiceUtil.logSystemMessage(contactValue.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_CONTACT_DETAIL_VALUE_CREATED, 
result, null, this.getJournalEntryDao()); return result; } @Override protected ProbandStatusEntryOutVO handleAddProbandStatusEntry( AuthenticationVO auth, ProbandStatusEntryInVO newProbandStatusEntry) throws Exception { checkProbandStatusEntryInput(newProbandStatusEntry); ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao(); ProbandStatusEntry statusEntry = statusEntryDao.probandStatusEntryInVOToEntity(newProbandStatusEntry); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); CoreUtil.modifyVersion(statusEntry, now, user); statusEntry = statusEntryDao.create(statusEntry); notifyProbandInactive(statusEntry, now); ProbandStatusEntryOutVO result = statusEntryDao.toProbandStatusEntryOutVO(statusEntry); ServiceUtil.logSystemMessage(statusEntry.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_STATUS_ENTRY_CREATED, result, null, this.getJournalEntryDao()); return result; } @Override protected ProbandTagValueOutVO handleAddProbandTagValue( AuthenticationVO auth, ProbandTagValueInVO newProbandTagValue) throws Exception { checkProbandTagValueInput(newProbandTagValue); ProbandTagValueDao tagValueDao = this.getProbandTagValueDao(); ProbandTagValue tagValue = tagValueDao.probandTagValueInVOToEntity(newProbandTagValue); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); CoreUtil.modifyVersion(tagValue, now, user); tagValue = tagValueDao.create(tagValue); ProbandTagValueOutVO result = tagValueDao.toProbandTagValueOutVO(tagValue); ServiceUtil.logSystemMessage(tagValue.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_TAG_VALUE_CREATED, result, null, this.getJournalEntryDao()); return result; } @Override protected ProcedureOutVO handleAddProcedure(AuthenticationVO auth, ProcedureInVO newProcedure) throws Exception { checkProcedureInput(newProcedure); ProcedureDao procedureDao = this.getProcedureDao(); Procedure procedure = 
procedureDao.procedureInVOToEntity(newProcedure); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); CoreUtil.modifyVersion(procedure, now, user); procedure = procedureDao.create(procedure); ProcedureOutVO result = procedureDao.toProcedureOutVO(procedure); ServiceUtil.logSystemMessage(procedure.getProband(), result.getProband(), now, user, SystemMessageCodes.PROCEDURE_CREATED, result, null, this.getJournalEntryDao()); return result; } @Override protected Collection<String> handleCompleteCostTypes(AuthenticationVO auth, Long trialDepartmentId, Long trialId, Long probandDepartmentId, Long probandId, String costTypePrefix, Integer limit) throws Exception { if (trialDepartmentId != null) { CheckIDUtil.checkDepartmentId(trialDepartmentId, this.getDepartmentDao()); } if (probandDepartmentId != null) { CheckIDUtil.checkDepartmentId(probandDepartmentId, this.getDepartmentDao()); } if (trialId != null) { CheckIDUtil.checkTrialId(trialId, this.getTrialDao()); } if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } return this.getMoneyTransferDao().findCostTypes(trialDepartmentId, trialId, probandDepartmentId, probandId, costTypePrefix, limit); } @Override protected BankAccountOutVO handleDeleteBankAccount(AuthenticationVO auth, Long bankAccountId) throws Exception { BankAccountDao bankAccountDao = this.getBankAccountDao(); BankAccount bankAccount = CheckIDUtil.checkBankAccountId(bankAccountId, bankAccountDao); Proband proband = bankAccount.getProband(); BankAccountOutVO result = bankAccountDao.toBankAccountOutVO(bankAccount); if (!result.isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_BANK_ACCOUNT); } ServiceUtil.checkProbandLocked(proband); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); JournalEntryDao journalEntryDao = this.getJournalEntryDao(); proband.removeBankAccounts(bankAccount); bankAccount.setProband(null); 
MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
		// cascade: delete every money transfer booked against this bank account
		Iterator<MoneyTransfer> moneyTransfersIt = bankAccount.getMoneyTransfers().iterator();
		while (moneyTransfersIt.hasNext()) {
			MoneyTransfer moneyTransfer = moneyTransfersIt.next();
			MoneyTransferOutVO moneyTransferVO = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
			Trial trial = moneyTransfer.getTrial();
			if (trial != null) {
				// trial-bound transfers require the trial to be unlocked; the cascade is logged in the trial journal too
				ServiceUtil.checkTrialLocked(trial);
				logSystemMessage(trial, result.getProband(), now, user, SystemMessageCodes.BANK_ACCOUNT_DELETED_MONEY_TRANSFER_DELETED, moneyTransferVO, null,
						journalEntryDao);
				trial.removePayoffs(moneyTransfer);
			}
			moneyTransfer.setBankAccount(null);
			moneyTransferDao.remove(moneyTransfer);
			moneyTransfer.setProband(null);
			proband.removeMoneyTransfers(moneyTransfer);
			ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.BANK_ACCOUNT_DELETED_MONEY_TRANSFER_DELETED, moneyTransferVO,
					null, journalEntryDao);
		}
		bankAccount.getMoneyTransfers().clear();
		bankAccountDao.remove(bankAccount);
		ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.BANK_ACCOUNT_DELETED, result, null, journalEntryDao);
		return result;
	}

	/**
	 * Deletes a diagnosis. The diagnosis must belong to a decryptable, unlocked proband;
	 * medications referencing the diagnosis are detached from it (not deleted), each
	 * detachment and the deletion itself being written to the proband's journal.
	 *
	 * @param auth the authentication credentials of the calling user
	 * @param diagnosisId the id of the diagnosis to delete
	 * @return the output VO of the deleted diagnosis
	 * @throws Exception if the id is unknown, the diagnosis cannot be decrypted or the proband is locked
	 */
	@Override
	protected DiagnosisOutVO handleDeleteDiagnosis(AuthenticationVO auth, Long diagnosisId) throws Exception {
		DiagnosisDao diagnosisDao = this.getDiagnosisDao();
		Diagnosis diagnosis = CheckIDUtil.checkDiagnosisId(diagnosisId, diagnosisDao);
		Proband proband = diagnosis.getProband();
		DiagnosisOutVO result = diagnosisDao.toDiagnosisOutVO(diagnosis);
		if (!result.isDecrypted()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_DIAGNOSIS);
		}
		ServiceUtil.checkProbandLocked(proband);
		Timestamp now = new Timestamp(System.currentTimeMillis());
		User user = CoreUtil.getUser();
		JournalEntryDao journalEntryDao = this.getJournalEntryDao();
		// unlink the diagnosis code (AlphaId) association before removal
		AlphaId alphaId = diagnosis.getCode();
		alphaId.removeDiagnoses(diagnosis);
		diagnosis.setCode(null);
		proband.removeDiagnoses(diagnosis);
diagnosis.setProband(null);
		// detach (but keep) all medications that reference this diagnosis, bumping their
		// version metadata and journaling each update
		MedicationDao medicationDao = this.getMedicationDao();
		Iterator<Medication> medicationsIt = diagnosis.getMedications().iterator();
		while (medicationsIt.hasNext()) {
			Medication medication = medicationsIt.next();
			MedicationOutVO originalMedicationVO = medicationDao.toMedicationOutVO(medication);
			medication.setDiagnosis(null);
			CoreUtil.modifyVersion(medication, medication.getVersion(), now, user);
			medicationDao.update(medication);
			MedicationOutVO medicationVO = medicationDao.toMedicationOutVO(medication);
			ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.DIAGNOSIS_DELETED_MEDICATION_UPDATED, medicationVO,
					originalMedicationVO, journalEntryDao);
		}
		diagnosis.getMedications().clear();
		diagnosisDao.remove(diagnosis);
		ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.DIAGNOSIS_DELETED, result, null, journalEntryDao);
		return result;
	}

	/**
	 * Deletes a medication record. The medication must be decryptable for the current user;
	 * it is unlinked from its proband and from its optional diagnosis and procedure
	 * associations before removal, and the deletion is written to the proband's journal.
	 *
	 * @param auth the authentication credentials of the calling user
	 * @param medicationId the id of the medication to delete
	 * @return the output VO of the deleted medication
	 * @throws Exception if the id is unknown or the medication cannot be decrypted
	 */
	@Override
	protected MedicationOutVO handleDeleteMedication(AuthenticationVO auth, Long medicationId) throws Exception {
		MedicationDao medicationDao = this.getMedicationDao();
		Medication medication = CheckIDUtil.checkMedicationId(medicationId, medicationDao);
		Proband proband = medication.getProband();
		MedicationOutVO result = medicationDao.toMedicationOutVO(medication);
		if (!result.isDecrypted()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MEDICATION);
		}
		// NOTE(review): unlike the sibling delete handlers, there is no
		// ServiceUtil.checkProbandLocked(proband) call here — confirm this is intentional
		Timestamp now = new Timestamp(System.currentTimeMillis());
		User user = CoreUtil.getUser();
		// unlink from the owning proband and from the optional diagnosis/procedure associations
		Diagnosis diagnosis = medication.getDiagnosis();
		Procedure procedure = medication.getProcedure();
		proband.removeMedications(medication);
		medication.setProband(null);
		if (diagnosis != null) {
			diagnosis.removeMedications(medication);
			medication.setDiagnosis(null);
		}
		if (procedure != null) {
			procedure.removeMedications(medication);
			medication.setProcedure(null);
		}
		medicationDao.remove(medication);
		ServiceUtil.logSystemMessage(proband, result.getProband(), now, user,
SystemMessageCodes.MEDICATION_DELETED, result, null, this.getJournalEntryDao()); return result; } @Override protected MoneyTransferOutVO handleDeleteMoneyTransfer(AuthenticationVO auth, Long moneyTransferId) throws Exception { MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao(); MoneyTransfer moneyTransfer = CheckIDUtil.checkMoneyTransferId(moneyTransferId, moneyTransferDao); Proband proband = moneyTransfer.getProband(); MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer); if (!result.isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MONEY_TRANSFER); } Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); BankAccount bankAccount = moneyTransfer.getBankAccount(); Trial trial = moneyTransfer.getTrial(); if (trial != null) { ServiceUtil.checkTrialLocked(trial); trial.removePayoffs(moneyTransfer); moneyTransfer.setTrial(null); } proband.removeMoneyTransfers(moneyTransfer); moneyTransfer.setProband(null); if (bankAccount != null) { bankAccount.removeMoneyTransfers(moneyTransfer); moneyTransfer.setBankAccount(null); } moneyTransferDao.remove(moneyTransfer); JournalEntryDao journalEntryDao = this.getJournalEntryDao(); if (trial != null) { logSystemMessage(trial, result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_DELETED, result, null, journalEntryDao); } ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_DELETED, result, null, journalEntryDao); return result; } @Override protected ProbandOutVO handleDeleteProband(AuthenticationVO auth, Long probandId, boolean defer, boolean force, String deferredDeleteReason, Integer maxInstances, Integer maxParentsDepth, Integer maxChildrenDepth) throws Exception { ProbandDao probandDao = this.getProbandDao(); JournalEntryDao journalEntryDao = this.getJournalEntryDao(); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = 
CoreUtil.getUser(); ProbandOutVO result; if (!force && defer) { Proband originalProband = CheckIDUtil.checkProbandId(probandId, probandDao); ProbandOutVO original = probandDao.toProbandOutVO(originalProband, maxInstances, maxParentsDepth, maxChildrenDepth); if (original.getBlinded()) { if (!user.getDepartment().getId().equals(originalProband.getDepartment().getId())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_NOT_EQUAL_TO_USER_DEPARTMENT); } } else { if (!original.isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } } probandDao.evict(originalProband); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao, LockMode.PESSIMISTIC_WRITE); if (CommonUtil.isEmptyString(deferredDeleteReason)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.DEFERRED_DELETE_REASON_REQUIRED); } proband.setDeferredDelete(true); proband.setDeferredDeleteReason(deferredDeleteReason); CoreUtil.modifyVersion(proband, proband.getVersion(), now, user); // no opt. 
locking probandDao.update(proband); result = probandDao.toProbandOutVO(proband, maxInstances, maxParentsDepth, maxChildrenDepth); ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_MARKED_FOR_DELETION, result, original, journalEntryDao); Iterator<ProbandOutVO> parentsIt = original.getParents().iterator(); while (parentsIt.hasNext()) { ProbandOutVO parent = parentsIt.next(); ServiceUtil.logSystemMessage(probandDao.load(parent.getId()), result, now, user, SystemMessageCodes.PROBAND_MARKED_FOR_DELETION, result, original, journalEntryDao); } } else { Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao, LockMode.PESSIMISTIC_WRITE); result = probandDao.toProbandOutVO(proband, maxInstances, maxParentsDepth, maxChildrenDepth); if (result.getBlinded()) { if (!user.getDepartment().getId().equals(result.getDepartment().getId())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_NOT_EQUAL_TO_USER_DEPARTMENT); } } else { if (!result.isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } } ServiceUtil.removeProband(proband, result, true, user, now, this.getProbandDao(), this.getProbandContactParticularsDao(), this.getAnimalContactParticularsDao(), journalEntryDao, this.getNotificationDao(), this.getNotificationRecipientDao(), this.getProbandTagValueDao(), this.getProbandContactDetailValueDao(), this.getProbandAddressDao(), this.getProbandStatusEntryDao(), this.getDiagnosisDao(), this.getProcedureDao(), this.getMedicationDao(), this.getInventoryBookingDao(), this.getMoneyTransferDao(), this.getBankAccountDao(), this.getProbandListStatusEntryDao(), this.getProbandListEntryDao(), this.getProbandListEntryTagValueDao(), this.getInputFieldValueDao(), this.getInquiryValueDao(), this.getECRFFieldValueDao(), this.getECRFFieldStatusEntryDao(), this.getSignatureDao(), this.getECRFStatusEntryDao(), this.getMassMailRecipientDao(), this.getJobDao(), this.getFileDao()); } 
return result; } @Override protected ProbandAddressOutVO handleDeleteProbandAddress( AuthenticationVO auth, Long probandAddressId) throws Exception { ProbandAddressDao addressDao = this.getProbandAddressDao(); ProbandAddress address = CheckIDUtil.checkProbandAddressId(probandAddressId, addressDao); Proband proband = address.getProband(); this.getProbandDao().lock(proband, LockMode.PESSIMISTIC_WRITE); ProbandAddressOutVO result = addressDao.toProbandAddressOutVO(address); if (!result.isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_ADDRESS); } ServiceUtil.checkProbandLocked(proband); if (address.isWireTransfer() && addressDao.getCount(address.getProband().getId(), null, null, null) > 1) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DELETE_WIRE_TRANSFER_PROBAND_ADDRESS); } proband.removeAddresses(address); address.setProband(null); addressDao.remove(address); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROBAND_ADDRESS_DELETED, result, null, this.getJournalEntryDao()); return result; } @Override protected ProbandContactDetailValueOutVO handleDeleteProbandContactDetailValue( AuthenticationVO auth, Long probandContactDetailValueId) throws Exception { ProbandContactDetailValueDao contactValueDao = this.getProbandContactDetailValueDao(); ProbandContactDetailValue contactValue = CheckIDUtil.checkProbandContactDetailValueId(probandContactDetailValueId, contactValueDao); Proband proband = contactValue.getProband(); ProbandContactDetailValueOutVO result = contactValueDao.toProbandContactDetailValueOutVO(contactValue); if (!result.isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_CONTACT_DETAIL_VALUE); } ServiceUtil.checkProbandLocked(proband); proband.removeContactDetailValues(contactValue); contactValue.setProband(null); 
contactValueDao.remove(contactValue);
		Timestamp now = new Timestamp(System.currentTimeMillis());
		User user = CoreUtil.getUser();
		// journal the deletion of the contact detail value
		ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROBAND_CONTACT_DETAIL_VALUE_DELETED, result, null,
				this.getJournalEntryDao());
		return result;
	}

	/**
	 * Deletes a proband status entry. The entry must belong to a decryptable, unlocked
	 * proband; notifications attached to the entry are removed along with it, and the
	 * deletion is written to the proband's journal.
	 *
	 * @param auth the authentication credentials of the calling user
	 * @param probandStatusEntryId the id of the status entry to delete
	 * @return the output VO of the deleted status entry
	 * @throws Exception if the id is unknown, the entry cannot be decrypted or the proband is locked
	 */
	@Override
	protected ProbandStatusEntryOutVO handleDeleteProbandStatusEntry(
			AuthenticationVO auth, Long probandStatusEntryId) throws Exception {
		ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao();
		ProbandStatusEntry statusEntry = CheckIDUtil.checkProbandStatusEntryId(probandStatusEntryId, statusEntryDao);
		Proband proband = statusEntry.getProband();
		ProbandStatusEntryOutVO result = statusEntryDao.toProbandStatusEntryOutVO(statusEntry);
		if (!result.isDecrypted()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_STATUS_ENTRY);
		}
		ServiceUtil.checkProbandLocked(proband);
		proband.removeStatusEntries(statusEntry);
		statusEntry.setProband(null);
		// remove notifications that were created for this status entry
		ServiceUtil.removeNotifications(statusEntry.getNotifications(), this.getNotificationDao(), this.getNotificationRecipientDao());
		statusEntryDao.remove(statusEntry);
		Timestamp now = new Timestamp(System.currentTimeMillis());
		User user = CoreUtil.getUser();
		ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROBAND_STATUS_ENTRY_DELETED, result, null,
				this.getJournalEntryDao());
		return result;
	}

	@Override
	protected ProbandTagValueOutVO handleDeleteProbandTagValue(AuthenticationVO auth, Long probandTagValueId) throws Exception {
		ProbandTagValueDao tagValueDao = this.getProbandTagValueDao();
		ProbandTagValue tagValue = CheckIDUtil.checkProbandTagValueId(probandTagValueId, tagValueDao);
		Proband proband = tagValue.getProband();
		ProbandTagValueOutVO result = tagValueDao.toProbandTagValueOutVO(tagValue);
		if (!result.isDecrypted()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_TAG_VALUE);
		}
ServiceUtil.checkProbandLocked(proband); proband.removeTagValues(tagValue); tagValue.setProband(null); tagValueDao.remove(tagValue); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROBAND_TAG_VALUE_DELETED, result, null, this.getJournalEntryDao()); return result; } @Override protected ProcedureOutVO handleDeleteProcedure(AuthenticationVO auth, Long procedureId) throws Exception { ProcedureDao procedureDao = this.getProcedureDao(); Procedure procedure = CheckIDUtil.checkProcedureId(procedureId, procedureDao); Proband proband = procedure.getProband(); ProcedureOutVO result = procedureDao.toProcedureOutVO(procedure); if (!result.isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROCEDURE); } ServiceUtil.checkProbandLocked(proband); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); JournalEntryDao journalEntryDao = this.getJournalEntryDao(); OpsCode opsCode = procedure.getCode(); opsCode.removeProcedures(procedure); procedure.setCode(null); proband.removeProcedures(procedure); procedure.setProband(null); MedicationDao medicationDao = this.getMedicationDao(); Iterator<Medication> medicationsIt = procedure.getMedications().iterator(); while (medicationsIt.hasNext()) { Medication medication = medicationsIt.next(); MedicationOutVO originalMedicationVO = medicationDao.toMedicationOutVO(medication); medication.setProcedure(null); CoreUtil.modifyVersion(medication, medication.getVersion(), now, user); medicationDao.update(medication); MedicationOutVO medicationVO = medicationDao.toMedicationOutVO(medication); ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROCEDURE_DELETED_MEDICATION_UPDATED, medicationVO, originalMedicationVO, journalEntryDao); } procedure.getMedications().clear(); procedureDao.remove(procedure); 
ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROCEDURE_DELETED, result, null, journalEntryDao);
		return result;
	}

	// Exports the reimbursements (money transfers) of a person proband to an excel VO,
	// optionally filtered by cost type, payment method and paid flag; the export is journaled.
	@Override
	protected ReimbursementsExcelVO handleExportReimbursements(
			AuthenticationVO auth, Long probandId, String costType, PaymentMethod method, Boolean paid) throws Exception {
		ProbandDao probandDao = this.getProbandDao();
		Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
		// reimbursements are supported for person entries only
		if (!proband.isPerson()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PROBAND_NOT_PERSON);
		}
		ProbandOutVO probandVO = probandDao.toProbandOutVO(proband);
		MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
		Collection<String> costTypes = moneyTransferDao.getCostTypes(null, null, null, probandVO.getId(), method);
		Collection<MoneyTransfer> moneyTransfers = moneyTransferDao.findByProbandTrialMethodCostTypePaidPerson(null, null, null, probandVO.getId(), method, costType,
				paid, null, null);
		ReimbursementsExcelVO result = ServiceUtil.createReimbursementsExcel(moneyTransfers, costTypes, null, probandVO, costType, method, paid, moneyTransferDao,
				this.getBankAccountDao(), this.getProbandAddressDao(), this.getAddressTypeDao(), this.getUserDao());
		ServiceUtil.logSystemMessage(proband, result.getProband(), CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(),
				SystemMessageCodes.REIMBURSEMENTS_EXPORTED, result, null, this.getJournalEntryDao());
		return result;
	}

	// Exports the visit schedule of a proband to an excel VO - across all trials, or for a
	// single trial when trialId is given; the export is journaled.
	@Override
	protected VisitScheduleExcelVO handleExportVisitSchedule(
			AuthenticationVO auth, Long probandId, Long trialId) throws Exception {
		ProbandDao probandDao = this.getProbandDao();
		Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
		ProbandOutVO probandVO = probandDao.toProbandOutVO(proband);
		TrialDao trialDao = this.getTrialDao();
		TrialOutVO trialVO = null;
		if (trialId != null) {
			trialVO = trialDao.toTrialOutVO(CheckIDUtil.checkTrialId(trialId, trialDao));
		}
		// the excel style (and the item query) depends on whether a trial filter is applied
		VisitScheduleExcelWriter.Styles style = trialVO == null ? VisitScheduleExcelWriter.Styles.PROBAND_VISIT_SCHEDULE
				: VisitScheduleExcelWriter.Styles.PROBAND_TRIAL_VISIT_SCHEDULE;
		VisitScheduleItemDao visitScheduleItemDao = this.getVisitScheduleItemDao();
		Collection<VisitScheduleItem> visitScheduleItems;
		switch (style) {
			case PROBAND_VISIT_SCHEDULE:
				visitScheduleItems = visitScheduleItemDao.findByTrialGroupVisitProbandTravel(null, null, null, probandVO.getId(), null, true, null);
				break;
			case PROBAND_TRIAL_VISIT_SCHEDULE:
				visitScheduleItems = visitScheduleItemDao.findByTrialGroupVisitProbandTravel(trialVO.getId(), null, null, probandVO.getId(), null, true, null);
				break;
			default:
				visitScheduleItems = null;
		}
		VisitScheduleExcelVO result = ServiceUtil.createVisitScheduleExcel(visitScheduleItems, style, probandVO, trialVO, visitScheduleItemDao,
				this.getProbandListStatusEntryDao(), this.getProbandAddressDao(), this.getUserDao());
		switch (style) {
			case PROBAND_VISIT_SCHEDULE:
				ServiceUtil.logSystemMessage(proband, result.getProband(), CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(),
						SystemMessageCodes.VISIT_SCHEDULE_EXPORTED, result, null, this.getJournalEntryDao());
				break;
			case PROBAND_TRIAL_VISIT_SCHEDULE:
				ServiceUtil.logSystemMessage(proband, trialVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(),
						SystemMessageCodes.VISIT_SCHEDULE_EXPORTED, result, null, this.getJournalEntryDao());
				break;
			default:
		}
		return result;
	}

	// Lists probands that are due for automatic deletion (optionally filtered by department
	// and proband category), taking the given reminder period into account.
	@Override
	protected Collection<ProbandOutVO> handleGetAutoDeletionProbands(
			AuthenticationVO auth, Date today, Long departmentId, Long probandCategoryId, VariablePeriod reminderPeriod, Long reminderPeriodDays, PSFVO psf)
			throws Exception {
		if (departmentId != null) {
			CheckIDUtil.checkDepartmentId(departmentId, this.getDepartmentDao());
		}
		if (probandCategoryId != null) {
			CheckIDUtil.checkProbandCategoryId(probandCategoryId, this.getProbandCategoryDao());
		}
		ServiceUtil.checkReminderPeriod(reminderPeriod, reminderPeriodDays);
		ProbandDao probandDao = this.getProbandDao();
		Collection autoDeletionProbands = probandDao.findToBeAutoDeleted(today, departmentId, probandCategoryId, reminderPeriod, reminderPeriodDays, null, true, psf);
		// in-place conversion of entities to out VOs
		probandDao.toProbandOutVOCollection(autoDeletionProbands);
		return autoDeletionProbands;
	}

	// Returns a single bank account by id.
	@Override
	protected BankAccountOutVO handleGetBankAccount(AuthenticationVO auth, Long bankAccountId) throws Exception {
		BankAccountDao bankAccountDao = this.getBankAccountDao();
		BankAccount bankAccount = CheckIDUtil.checkBankAccountId(bankAccountId, bankAccountDao);
		BankAccountOutVO result = bankAccountDao.toBankAccountOutVO(bankAccount);
		return result;
	}

	// Counts bank accounts, optionally restricted to one proband.
	@Override
	protected long handleGetBankAccountCount(AuthenticationVO auth, Long probandId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		return this.getBankAccountDao().getCount(probandId);
	}

	// Lists bank accounts of a proband (paged/sorted/filtered via psf).
	@Override
	protected Collection<BankAccountOutVO> handleGetBankAccountList(
			AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		BankAccountDao bankAccountDao = this.getBankAccountDao();
		Collection bankAccounts = bankAccountDao.findByProband(probandId, null, null, psf);
		bankAccountDao.toBankAccountOutVOCollection(bankAccounts);
		return bankAccounts;
	}

	// Lists bank accounts filtered by proband, active flag and/or a specific account id.
	@Override
	protected Collection<BankAccountOutVO> handleGetBankAccounts(
			AuthenticationVO auth, Long probandId, Boolean active, Long bankAccountId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		BankAccountDao bankAccountDao = this.getBankAccountDao();
		if (bankAccountId != null) {
			CheckIDUtil.checkBankAccountId(bankAccountId, bankAccountDao);
		}
		Collection bankAccounts = bankAccountDao.findByProbandActiveId(probandId, active, bankAccountId);
		bankAccountDao.toBankAccountOutVOCollection(bankAccounts);
		return bankAccounts;
	}

	// Returns inventory bookings overlapping a proband status entry's period; only status
	// types where the proband is not active can produce collisions.
	@Override
	protected Collection<InventoryBookingOutVO> handleGetCollidingProbandInventoryBookings(
			AuthenticationVO auth, Long probandStatusEntryId, Boolean isRelevantForProbandAppointments) throws Exception {
		ProbandStatusEntry probandStatus = CheckIDUtil.checkProbandStatusEntryId(probandStatusEntryId, this.getProbandStatusEntryDao());
		Collection collidingInventoryBookings;
		if (!probandStatus.getType().isProbandActive()) {
			InventoryBookingDao inventoryBookingDao = this.getInventoryBookingDao();
			collidingInventoryBookings = inventoryBookingDao.findByProbandCalendarInterval(probandStatus.getProband().getId(), null, probandStatus.getStart(),
					probandStatus.getStop(), isRelevantForProbandAppointments);
			inventoryBookingDao.toInventoryBookingOutVOCollection(collidingInventoryBookings);
		} else {
			// status type leaves the proband active - return an empty result
			collidingInventoryBookings = new ArrayList<InventoryBookingOutVO>();
		}
		return collidingInventoryBookings;
	}

	// Returns visit schedule items overlapping a proband status entry's period, collected
	// across all of the proband's trial participations (de-duplicated via a set).
	@Override
	protected Collection<VisitScheduleItemOutVO> handleGetCollidingVisitScheduleItems(
			AuthenticationVO auth, Long probandStatusEntryId, boolean allProbandGroups) throws Exception {
		ProbandStatusEntry probandStatusEntry = CheckIDUtil.checkProbandStatusEntryId(probandStatusEntryId, this.getProbandStatusEntryDao());
		if (!probandStatusEntry.getType().isProbandActive()) {
			Collection collidingVisitScheduleItems = new HashSet();
			VisitScheduleItemDao visitScheduleItemDao = this.getVisitScheduleItemDao();
			Iterator<ProbandListEntry> trialParticipationsIt = probandStatusEntry.getProband().getTrialParticipations().iterator();
			while (trialParticipationsIt.hasNext()) {
				ProbandListEntry probandListEntry = trialParticipationsIt.next();
				ProbandGroup probandGroup = probandListEntry.getGroup();
				if (probandGroup != null) {
					// proband is assigned to a group - restrict the interval query to that group
					collidingVisitScheduleItems
							.addAll(visitScheduleItemDao.findByInterval(probandListEntry.getTrial().getId(), probandGroup.getId(), probandListEntry.getProband().getId(),
									probandStatusEntry.getStart(), probandStatusEntry.getStop()));
				} else {
					if (allProbandGroups) {
						collidingVisitScheduleItems.addAll(
visitScheduleItemDao.findByInterval(probandListEntry.getTrial().getId(), null, probandListEntry.getProband().getId(),
										probandStatusEntry.getStart(), probandStatusEntry.getStop()));
					}
				}
			}
			visitScheduleItemDao.toVisitScheduleItemOutVOCollection(collidingVisitScheduleItems);
			return new ArrayList<VisitScheduleItemOutVO>(collidingVisitScheduleItems);
		} else {
			// status type leaves the proband active - return an empty result
			return new ArrayList<VisitScheduleItemOutVO>();
		}
	}

	// Returns the distinct cost type strings of money transfers matching the given
	// trial/proband/department/method filters (all filters optional).
	@Override
	protected Collection<String> handleGetCostTypes(AuthenticationVO auth, Long trialDepartmentId, Long trialId, Long probandDepartmentId, Long probandId,
			PaymentMethod method) throws Exception {
		if (trialDepartmentId != null) {
			CheckIDUtil.checkDepartmentId(trialDepartmentId, this.getDepartmentDao());
		}
		if (probandDepartmentId != null) {
			CheckIDUtil.checkDepartmentId(probandDepartmentId, this.getDepartmentDao());
		}
		if (trialId != null) {
			CheckIDUtil.checkTrialId(trialId, this.getTrialDao());
		}
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		return this.getMoneyTransferDao().getCostTypes(trialDepartmentId, trialId, probandDepartmentId, probandId, method);
	}

	// Returns a single diagnosis by id.
	@Override
	protected DiagnosisOutVO handleGetDiagnosis(AuthenticationVO auth, Long diagnosisId) throws Exception {
		DiagnosisDao diagnosisDao = this.getDiagnosisDao();
		Diagnosis diagnosis = CheckIDUtil.checkDiagnosisId(diagnosisId, diagnosisDao);
		DiagnosisOutVO result = diagnosisDao.toDiagnosisOutVO(diagnosis);
		return result;
	}

	// Counts diagnoses, optionally restricted to one proband.
	@Override
	protected long handleGetDiagnosisCount(
			AuthenticationVO auth, Long probandId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		return this.getDiagnosisDao().getCount(probandId);
	}

	// Lists diagnoses of a proband (paged/sorted/filtered via psf).
	@Override
	protected Collection<DiagnosisOutVO> handleGetDiagnosisList(
			AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		DiagnosisDao diagnosisDao = this.getDiagnosisDao();
		Collection diagnoses = diagnosisDao.findByProband(probandId, psf);
		diagnosisDao.toDiagnosisOutVOCollection(diagnoses);
		return diagnoses;
	}

	// Counts inquiries of a trial, filtered by the active/signup flags.
	@Override
	protected long handleGetInquiryCount(AuthenticationVO auth, Long trialId, Boolean active, Boolean activeSignup) throws Exception {
		if (trialId != null) {
			CheckIDUtil.checkTrialId(trialId, this.getTrialDao());
		}
		return this.getInquiryDao().getCount(trialId, active, activeSignup);
	}

	// Counts inquiries of a trial within a category, filtered by the active/signup flags.
	@Override
	protected long handleGetInquiryCount(AuthenticationVO auth, Long trialId, String category, Boolean active, Boolean activeSignup) throws Exception {
		if (trialId != null) {
			CheckIDUtil.checkTrialId(trialId, this.getTrialDao());
		}
		return this.getInquiryDao().getCount(trialId, category, active, activeSignup);
	}

	// Lists the inquiry values of a proband for one particular input field.
	@Override
	protected Collection<InquiryValueOutVO> handleGetInquiryInputFieldValues(
			AuthenticationVO auth, Long trialId, Boolean active, Boolean activeSignup, Long probandId, Long inputFieldId) throws Exception {
		if (trialId != null) {
			CheckIDUtil.checkTrialId(trialId, this.getTrialDao());
		}
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		CheckIDUtil.checkInputFieldId(inputFieldId, this.getInputFieldDao());
		InquiryValueDao inquiryValueDao = this.getInquiryValueDao();
		Collection inquiryFieldValues = inquiryValueDao.findByTrialActiveProbandField(trialId, active, activeSignup, probandId, inputFieldId);
		inquiryValueDao.toInquiryValueOutVOCollection(inquiryFieldValues);
		return inquiryFieldValues;
	}

	// Lists trials for which the proband has inquiry values (sorted).
	@Override
	protected Collection<TrialOutVO> handleGetInquiryTrials(AuthenticationVO auth, Long probandId, Boolean active, Boolean activeSignup) throws Exception {
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		TrialDao trialDao = this.getTrialDao();
		Collection trials = trialDao.findByInquiryValuesProbandSorted(null, probandId, active, activeSignup);
		trialDao.toTrialOutVOCollection(trials);
		return trials;
	}

	// Returns the inquiry value of a proband for one inquiry; when no value is stored yet,
	// a preset value is produced instead. Js values are added only for inquiries with a js
	// variable name and only when browser-side field calculation is enabled.
	@Override
	protected InquiryValuesOutVO handleGetInquiryValue(AuthenticationVO auth, Long probandId, Long inquiryId) throws Exception {
		ProbandDao probandDao = this.getProbandDao();
		Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
		InquiryDao inquiryDao = this.getInquiryDao();
		Inquiry inquiry = CheckIDUtil.checkInquiryId(inquiryId, inquiryDao);
		InquiryValueDao inquiryValueDao = this.getInquiryValueDao();
		InquiryValuesOutVO result = new InquiryValuesOutVO();
		Iterator<InquiryValue> it = inquiryValueDao.findByProbandInquiry(probandId, inquiryId).iterator();
		if (it.hasNext()) {
			InquiryValue inquiryValue = it.next();
			result.getPageValues().add(inquiryValueDao.toInquiryValueOutVO(inquiryValue));
			if (!CommonUtil.isEmptyString(inquiryValue.getInquiry().getJsVariableName())
					&& Settings.getBoolean(SettingCodes.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION, Bundle.SETTINGS,
							DefaultSettings.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION)) {
				result.getJsValues().add(inquiryValueDao.toInquiryValueJsonVO(inquiryValue));
			}
		} else {
			// no stored value - fall back to a preset value
			result.getPageValues().add(
					ServiceUtil.createPresetInquiryOutValue(probandDao.toProbandOutVO(proband), inquiryDao.toInquiryOutVO(inquiry), null));
			if (!CommonUtil.isEmptyString(inquiry.getJsVariableName())
					&& Settings.getBoolean(SettingCodes.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION, Bundle.SETTINGS,
							DefaultSettings.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION)) {
				result.getJsValues().add(ServiceUtil.createPresetInquiryJsonValue(inquiry, this.getInputFieldSelectionSetValueDao()));
			}
		}
		return result;
	}

	// Returns a single inquiry value by id.
	@Override
	protected InquiryValueOutVO handleGetInquiryValueById(AuthenticationVO auth, Long inquiryValueId) throws Exception {
		InquiryValueDao inquiryValueDao = this.getInquiryValueDao();
		return inquiryValueDao.toInquiryValueOutVO(CheckIDUtil.checkInquiryValueId(inquiryValueId, inquiryValueDao));
	}

	// Counts the inquiry values of a proband in a trial.
	@Override
	protected long handleGetInquiryValueCount(AuthenticationVO auth, Long trialId, Boolean active, Boolean activeSignup, Long probandId) throws Exception {
		// the Trial local is unused; the checkTrialId call is kept for its id check
		Trial trial = CheckIDUtil.checkTrialId(trialId, this.getTrialDao());
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		return this.getInquiryValueDao().getCount(trialId, active, activeSignup, probandId);
	}

	// Counts the inquiry values of a proband in a trial, restricted to an inquiry category.
	@Override
	protected long handleGetInquiryValueCount(AuthenticationVO auth, Long trialId, String category, Boolean active, Boolean activeSignup, Long probandId)
			throws Exception {
		// the Trial local is unused; the checkTrialId call is kept for its id check
		Trial trial = CheckIDUtil.checkTrialId(trialId, this.getTrialDao());
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		return this.getInquiryValueDao().getCount(trialId, category, active, activeSignup, probandId);
	}

	// Returns page and js inquiry values of a proband in a trial (paged via psf).
	@Override
	protected InquiryValuesOutVO handleGetInquiryValues(
			AuthenticationVO auth, Long trialId, Boolean active, Boolean activeSignup, Long probandId, boolean sort, boolean loadAllJsValues, PSFVO psf)
			throws Exception {
		Trial trial = CheckIDUtil.checkTrialId(trialId, this.getTrialDao());
		ProbandDao probandDao = this.getProbandDao();
		ProbandOutVO probandVO = probandDao.toProbandOutVO(CheckIDUtil.checkProbandId(probandId, probandDao));
		return ServiceUtil.getInquiryValues(trial, probandVO, active, activeSignup,
				Settings.getBoolean(SettingCodes.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION, Bundle.SETTINGS,
						DefaultSettings.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION),
				loadAllJsValues, sort, psf, this.getInquiryDao(), this.getInquiryValueDao(), this.getInputFieldSelectionSetValueDao());
	}

	// Returns page and js inquiry values of a proband in a trial, restricted to an inquiry
	// category (paged via psf).
	@Override
	protected InquiryValuesOutVO handleGetInquiryValues(
			AuthenticationVO auth, Long trialId, String category, Boolean active, Boolean activeSignup, Long probandId, boolean sort, boolean loadAllJsValues,
			PSFVO psf) throws Exception {
		Trial trial = CheckIDUtil.checkTrialId(trialId, this.getTrialDao());
		ProbandDao probandDao = this.getProbandDao();
		ProbandOutVO probandVO = probandDao.toProbandOutVO(CheckIDUtil.checkProbandId(probandId, probandDao));
		return getInquiryValues(trial, category, probandVO, active, activeSignup,
				Settings.getBoolean(SettingCodes.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION, Bundle.SETTINGS,
DefaultSettings.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION),
				loadAllJsValues, sort, psf);
	}

	// Returns a single medication record by id.
	@Override
	protected MedicationOutVO handleGetMedication(AuthenticationVO auth, Long medicationId) throws Exception {
		MedicationDao medicationDao = this.getMedicationDao();
		Medication medication = CheckIDUtil.checkMedicationId(medicationId, medicationDao);
		MedicationOutVO result = medicationDao.toMedicationOutVO(medication);
		return result;
	}

	// Counts medications, optionally restricted by proband, diagnosis and/or procedure.
	@Override
	protected long handleGetMedicationCount(AuthenticationVO auth, Long probandId, Long diagnosisId, Long procedureId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		if (diagnosisId != null) {
			CheckIDUtil.checkDiagnosisId(diagnosisId, this.getDiagnosisDao());
		}
		if (procedureId != null) {
			CheckIDUtil.checkProcedureId(procedureId, this.getProcedureDao());
		}
		return this.getMedicationDao().getCount(probandId, diagnosisId, procedureId);
	}

	// Lists medications of a proband (paged/sorted/filtered via psf).
	@Override
	protected Collection<MedicationOutVO> handleGetMedicationList(AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		MedicationDao medicationDao = this.getMedicationDao();
		Collection medications = medicationDao.findByProband(probandId, psf);
		medicationDao.toMedicationOutVOCollection(medications);
		return medications;
	}

	// Returns a single money transfer by id.
	@Override
	protected MoneyTransferOutVO handleGetMoneyTransfer(AuthenticationVO auth, Long moneyTransferId) throws Exception {
		MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
		MoneyTransfer moneyTransfer = CheckIDUtil.checkMoneyTransferId(moneyTransferId, moneyTransferDao);
		MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
		return result;
	}

	// Counts money transfers, optionally restricted by proband and/or bank account.
	@Override
	protected long handleGetMoneyTransferCount(AuthenticationVO auth, Long probandId, Long bankAccountId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		if (bankAccountId != null) {
			CheckIDUtil.checkBankAccountId(bankAccountId, this.getBankAccountDao());
		}
		return this.getMoneyTransferDao().getCount(null, probandId, bankAccountId, null, null, null);
	}

	// Lists money transfers of a proband (paged/sorted/filtered via psf).
	@Override
	protected Collection<MoneyTransferOutVO> handleGetMoneyTransferList(
			AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
		Collection moneyTransfers = moneyTransferDao.findByProbandTrialMethodCostTypePaidPerson(null, null, null, probandId, null, null, null, null, psf);
		moneyTransferDao.toMoneyTransferOutVOCollection(moneyTransfers);
		return moneyTransfers;
	}

	// Not implemented: always returns null (no payment reference is generated).
	@Override
	protected String handleGetNewPaymentReference(AuthenticationVO auth, MoneyTransferInVO newMoneyTransfer) throws Exception {
		// TODO Auto-generated method stub
		return null;
	}

	// Lists trials the proband participates in (sorted).
	@Override
	protected Collection<TrialOutVO> handleGetParticipationTrials(AuthenticationVO auth, Long probandId) throws Exception {
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		TrialDao trialDao = this.getTrialDao();
		Collection trials = trialDao.findByParticipatingProbandSorted(probandId);
		trialDao.toTrialOutVOCollection(trials);
		return trials;
	}

	/**
	 * Returns a single proband by id; the max* arguments are passed through to the VO
	 * conversion to bound how deeply related records are resolved.
	 *
	 * @see org.phoenixctms.ctsms.service.proband.ProbandService#getProband(Long)
	 */
	@Override
	protected ProbandOutVO handleGetProband(AuthenticationVO auth, Long probandId, Integer maxInstances, Integer maxParentsDepth, Integer maxChildrenDepth)
			throws Exception {
		ProbandDao probandDao = this.getProbandDao();
		Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
		ProbandOutVO result = probandDao.toProbandOutVO(proband, maxInstances, maxParentsDepth, maxChildrenDepth);
		return result;
	}

	// Returns a single proband address by id.
	@Override
	protected ProbandAddressOutVO handleGetProbandAddress(AuthenticationVO auth, Long probandAddressId) throws Exception {
		ProbandAddressDao addressDao = this.getProbandAddressDao();
		ProbandAddress address = CheckIDUtil.checkProbandAddressId(probandAddressId, addressDao);
		ProbandAddressOutVO result = addressDao.toProbandAddressOutVO(address);
		return result;
	}

	// Counts addresses of a proband.
	@Override
	protected long handleGetProbandAddressCount(
			AuthenticationVO auth, Long probandId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		return this.getProbandAddressDao().getCount(probandId, null, null, null);
	}

	// Lists addresses of a proband (paged/sorted/filtered via psf).
	@Override
	protected Collection<ProbandAddressOutVO> handleGetProbandAddressList(
			AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		ProbandAddressDao addressDao = this.getProbandAddressDao();
		Collection probandAddresses = addressDao.findByProband(probandId, null, null, null, psf);
		addressDao.toProbandAddressOutVOCollection(probandAddresses);
		return probandAddresses;
	}

	// Returns a single proband contact detail value by id.
	@Override
	protected ProbandContactDetailValueOutVO handleGetProbandContactDetailValue(
			AuthenticationVO auth, Long probandContactDetailValueId) throws Exception {
		ProbandContactDetailValueDao contactValueDao = this.getProbandContactDetailValueDao();
		ProbandContactDetailValue contactValue = CheckIDUtil.checkProbandContactDetailValueId(probandContactDetailValueId, contactValueDao);
		ProbandContactDetailValueOutVO result = contactValueDao.toProbandContactDetailValueOutVO(contactValue);
		return result;
	}

	// Counts contact detail values of a proband, optionally filtered by the na flag.
	@Override
	protected long handleGetProbandContactDetailValueCount(
			AuthenticationVO auth, Long probandId, Boolean na) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		return this.getProbandContactDetailValueDao().getCount(probandId, null, na, null, null);
	}

	// Lists contact detail values of a proband (paged via psf), optionally filtered by the na flag.
	@Override
	protected Collection<ProbandContactDetailValueOutVO> handleGetProbandContactDetailValueList(
			AuthenticationVO auth, Long probandId, Boolean na, PSFVO psf) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		ProbandContactDetailValueDao contactValueDao = this.getProbandContactDetailValueDao();
		Collection probandContactValues = contactValueDao.findByProband(probandId, null, na, null, null, psf);
		contactValueDao.toProbandContactDetailValueOutVOCollection(probandContactValues);
		return probandContactValues;
	}

	// Lists the proband groups a proband is assigned to (sorted).
	@Override
	protected Collection<ProbandGroupOutVO> handleGetProbandGroupList(
			AuthenticationVO auth, Long probandId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		ProbandGroupDao probandGroupDao = this.getProbandGroupDao();
		Collection probandGroups = probandGroupDao.findByProbandSorted(probandId);
		probandGroupDao.toProbandGroupOutVOCollection(probandGroups);
		return probandGroups;
	}

	// Returns the proband's image VO.
	@Override
	protected ProbandImageOutVO handleGetProbandImage(AuthenticationVO auth, Long probandId) throws Exception {
		ProbandDao probandDao = this.getProbandDao();
		Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
		ProbandImageOutVO result = probandDao.toProbandImageOutVO(proband);
		return result;
	}

	// Counts inventory bookings of a proband.
	@Override
	protected long handleGetProbandInventoryBookingCount(
			AuthenticationVO auth, Long probandId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		return this.getInventoryBookingDao().getCount(null, probandId, null, null, null);
	}

	// Lists inventory bookings of a proband (paged/sorted/filtered via psf).
	@Override
	protected Collection<InventoryBookingOutVO> handleGetProbandInventoryBookingList(
			AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		InventoryBookingDao inventoryBookingDao = this.getInventoryBookingDao();
		Collection inventoryBookings = inventoryBookingDao.findByProband(probandId, psf);
		inventoryBookingDao.toInventoryBookingOutVOCollection(inventoryBookings);
		return inventoryBookings;
	}

	// Lists probands filtered by id and/or department (paged via psf); maxInstances bounds
	// the VO conversion depth.
	@Override
	protected Collection<ProbandOutVO> handleGetProbandList(AuthenticationVO auth, Long probandId, Long departmentId, Integer
maxInstances, PSFVO psf) throws Exception {
		ProbandDao probandDao = this.getProbandDao();
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, probandDao);
		}
		if (departmentId != null) {
			CheckIDUtil.checkDepartmentId(departmentId, this.getDepartmentDao());
		}
		Collection probands = probandDao.findByIdDepartment(probandId, departmentId, psf);
		ArrayList<ProbandOutVO> result = new ArrayList<ProbandOutVO>(probands.size());
		Iterator<Proband> probandIt = probands.iterator();
		while (probandIt.hasNext()) {
			result.add(probandDao.toProbandOutVO(probandIt.next(), maxInstances));
		}
		return result;
	}

	// Returns status entries describing matching probands' state at the given point in time.
	@Override
	protected Collection<ProbandStatusEntryOutVO> handleGetProbandStatus(
			AuthenticationVO auth, Date now, Long probandId, Long departmentId, Long probandCategoryId, Boolean probandActive, Boolean hideAvailability, PSFVO psf)
			throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		if (departmentId != null) {
			CheckIDUtil.checkDepartmentId(departmentId, this.getDepartmentDao());
		}
		if (probandCategoryId != null) {
			CheckIDUtil.checkProbandCategoryId(probandCategoryId, this.getProbandCategoryDao());
		}
		ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao();
		Collection probandStatusEntries = statusEntryDao.findProbandStatus(CommonUtil.dateToTimestamp(now), probandId, departmentId, probandCategoryId,
				probandActive, hideAvailability, psf);
		statusEntryDao.toProbandStatusEntryOutVOCollection(probandStatusEntries);
		return probandStatusEntries;
	}

	// Returns a single proband status entry by id.
	@Override
	protected ProbandStatusEntryOutVO handleGetProbandStatusEntry(
			AuthenticationVO auth, Long probandStatusEntryId) throws Exception {
		ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao();
		ProbandStatusEntry statusEntry = CheckIDUtil.checkProbandStatusEntryId(probandStatusEntryId, statusEntryDao);
		ProbandStatusEntryOutVO result = statusEntryDao.toProbandStatusEntryOutVO(statusEntry);
		return result;
	}

	// Counts status entries of a proband.
	@Override
	protected long handleGetProbandStatusEntryCount(AuthenticationVO auth, Long probandId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		return this.getProbandStatusEntryDao().getCount(probandId);
	}

	// Lists status entries within the given interval (optionally filtered by department and
	// proband category); when sort is set, the result is ordered by the interval comparator.
	@Override
	protected Collection<ProbandStatusEntryOutVO> handleGetProbandStatusEntryInterval(AuthenticationVO auth, Long departmentId, Long probandCategoryId,
			Boolean hideAvailability, Date from, Date to, boolean sort) throws Exception {
		if (departmentId != null) {
			CheckIDUtil.checkDepartmentId(departmentId, this.getDepartmentDao());
		}
		if (probandCategoryId != null) {
			CheckIDUtil.checkProbandCategoryId(probandCategoryId, this.getProbandCategoryDao());
		}
		ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao();
		Collection probandStatusEntries = statusEntryDao.findByDepartmentCategoryInterval(departmentId, probandCategoryId, CommonUtil.dateToTimestamp(from),
				CommonUtil.dateToTimestamp(to), null, null, hideAvailability);
		statusEntryDao.toProbandStatusEntryOutVOCollection(probandStatusEntries);
		if (sort) {
			probandStatusEntries = new ArrayList(probandStatusEntries);
			Collections.sort((ArrayList) probandStatusEntries, new ProbandStatusEntryIntervalComparator(false));
		}
		return probandStatusEntries;
	}

	// Lists status entries of a proband (paged/sorted/filtered via psf).
	@Override
	protected Collection<ProbandStatusEntryOutVO> handleGetProbandStatusEntryList(
			AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao();
		Collection probandStatusEntries = statusEntryDao.findByProband(probandId, psf);
		statusEntryDao.toProbandStatusEntryOutVOCollection(probandStatusEntries);
		return probandStatusEntries;
	}

	// Returns a single proband tag value by id.
	@Override
	protected ProbandTagValueOutVO handleGetProbandTagValue(AuthenticationVO auth, Long probandTagValueId) throws Exception {
		ProbandTagValueDao tagValueDao = this.getProbandTagValueDao();
		ProbandTagValue tagValue = CheckIDUtil.checkProbandTagValueId(probandTagValueId, tagValueDao);
		ProbandTagValueOutVO result = tagValueDao.toProbandTagValueOutVO(tagValue);
		return result;
	}

	// Counts tag values of a proband.
	@Override
	protected long handleGetProbandTagValueCount(AuthenticationVO auth, Long probandId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		return this.getProbandTagValueDao().getCount(probandId);
	}

	// Lists tag values of a proband (paged/sorted/filtered via psf).
	@Override
	protected Collection<ProbandTagValueOutVO> handleGetProbandTagValueList(
			AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		ProbandTagValueDao tagValueDao = this.getProbandTagValueDao();
		Collection probandTagValues = tagValueDao.findByProband(probandId, psf);
		tagValueDao.toProbandTagValueOutVOCollection(probandTagValues);
		return probandTagValues;
	}

	// Returns a single procedure record by id.
	@Override
	protected ProcedureOutVO handleGetProcedure(AuthenticationVO auth, Long procedureId) throws Exception {
		ProcedureDao procedureDao = this.getProcedureDao();
		Procedure procedure = CheckIDUtil.checkProcedureId(procedureId, procedureDao);
		ProcedureOutVO result = procedureDao.toProcedureOutVO(procedure);
		return result;
	}

	// Counts procedures of a proband.
	@Override
	protected long handleGetProcedureCount(
			AuthenticationVO auth, Long probandId) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		return this.getProcedureDao().getCount(probandId);
	}

	// Lists procedures of a proband (paged/sorted/filtered via psf).
	@Override
	protected Collection<ProcedureOutVO> handleGetProcedureList(
			AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
		if (probandId != null) {
			CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		}
		ProcedureDao procedureDao = this.getProcedureDao();
		Collection procedures = procedureDao.findByProband(probandId, psf);
		procedureDao.toProcedureOutVOCollection(procedures);
		return procedures;
	}

	// Lists trials for which the proband has reimbursements matching the filters (sorted).
	@Override
	protected Collection<TrialOutVO> handleGetReimbursementTrials(AuthenticationVO auth, Long probandId, String costType, PaymentMethod method, Boolean paid)
			throws Exception {
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		TrialDao trialDao = this.getTrialDao();
		Collection trials = trialDao.findByReimbursementProbandSorted(probandId, method, costType, paid);
		trialDao.toTrialOutVOCollection(trials);
		return trials;
	}

	// Returns the proband address used for wire transfers.
	@Override
	protected ProbandAddressOutVO handleGetWireTransferProbandAddress(AuthenticationVO auth, Long probandId) throws Exception {
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
		ProbandAddressDao probandAddressDao = this.getProbandAddressDao();
		return probandAddressDao.toProbandAddressOutVO(probandAddressDao.findByProbandWireTransfer(probandId));
	}

	// Renders inquiry forms of a proband as PDF - for one trial when trialId is given,
	// otherwise for all trials where the proband has inquiry values; rendering is journaled.
	@Override
	protected InquiriesPDFVO handleRenderInquiries(AuthenticationVO auth, Long trialId, Long probandId, Boolean active, Boolean activeSignup, boolean blank)
			throws Exception {
		ProbandDao probandDao = this.getProbandDao();
		Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
		ProbandOutVO probandVO = probandDao.toProbandOutVO(proband);
		TrialDao trialDao = this.getTrialDao();
		Trial trial = null;
		TrialOutVO trialVO = null;
		Collection<Trial> trials = new ArrayList<Trial>();
		if (trialId != null) {
			trial = CheckIDUtil.checkTrialId(trialId, trialDao);
			trialVO = trialDao.toTrialOutVO(trial);
			trials.add(trial);
		} else {
			trials = trialDao.findByInquiryValuesProbandSorted(null, probandId, active, activeSignup);
		}
		InquiriesPDFVO result = ServiceUtil.renderInquiries(proband, probandVO, trials, active, activeSignup, blank, this.getTrialDao(), this.getInquiryDao(),
				this.getInquiryValueDao(), this.getInputFieldDao(), this.getInputFieldSelectionSetValueDao(), this.getUserDao());
		JournalEntryDao journalEntryDao = this.getJournalEntryDao();
		if (trial != null) {
			// trial-scoped render: also journal on the trial
			ServiceUtil.logSystemMessage(trial, probandVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(),
					SystemMessageCodes.INQUIRY_PDF_RENDERED, result, null, journalEntryDao);
		}
ServiceUtil.logSystemMessage(proband, trialVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(), trial != null ? SystemMessageCodes.INQUIRY_PDF_RENDERED : SystemMessageCodes.INQUIRIES_PDF_RENDERED, result, null, journalEntryDao); return result; } @Override protected InquiriesPDFVO handleRenderInquiriesSignup(AuthenticationVO auth, Long departmentId, Long probandId, Boolean activeSignup) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao); ProbandOutVO probandVO = probandDao.toProbandOutVO(proband); Department department = null; if (departmentId != null) { department = CheckIDUtil.checkDepartmentId(departmentId, this.getDepartmentDao()); } Collection<Trial> trials = new ArrayList<Trial>(); Iterator<Trial> trialIt = this.getTrialDao().findBySignup(department != null ? department.getId() : null, true, null).iterator(); while (trialIt.hasNext()) { Trial trial = trialIt.next(); if (this.getInquiryValueDao().getCount(trial.getId(), null, activeSignup, proband.getId()) > 0) { trials.add(trial); } } InquiriesPDFVO result = ServiceUtil.renderInquiries(proband, probandVO, trials, null, activeSignup, false, this.getTrialDao(), this.getInquiryDao(), this.getInquiryValueDao(), this.getInputFieldDao(), this.getInputFieldSelectionSetValueDao(), this.getUserDao()); ServiceUtil.logSystemMessage(proband, (TrialOutVO) null, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(), SystemMessageCodes.INQUIRIES_SIGNUP_PDF_RENDERED, result, null, this.getJournalEntryDao()); return result; } @Override protected InquiriesPDFVO handleRenderInquiry(AuthenticationVO auth, Long trialId, String category, Long probandId, Boolean active, Boolean activeSignup, boolean blank) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao); ProbandOutVO probandVO = probandDao.toProbandOutVO(proband); 
TrialDao trialDao = this.getTrialDao(); Trial trial = CheckIDUtil.checkTrialId(trialId, trialDao); TrialOutVO trialVO = trialDao.toTrialOutVO(trial); Collection<Trial> trials = new ArrayList<Trial>(); trials.add(trial); InquiriesPDFVO result = ServiceUtil.renderInquiries(proband, probandVO, trials, active, activeSignup, blank, this.getTrialDao(), this.getInquiryDao(), this.getInquiryValueDao(), this.getInputFieldDao(), this.getInputFieldSelectionSetValueDao(), this.getUserDao()); JournalEntryDao journalEntryDao = this.getJournalEntryDao(); ServiceUtil.logSystemMessage(trial, probandVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(), SystemMessageCodes.INQUIRY_PDF_RENDERED, result, null, journalEntryDao); ServiceUtil.logSystemMessage(proband, trialVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(), SystemMessageCodes.INQUIRY_PDF_RENDERED, result, null, journalEntryDao); return result; } @Override protected ProbandLetterPDFVO handleRenderProbandLetterPDF( AuthenticationVO auth, Long probandAddressId) throws Exception { ProbandAddressDao probandAddressDao = this.getProbandAddressDao(); ProbandAddress address = CheckIDUtil.checkProbandAddressId(probandAddressId, probandAddressDao); if (!address.getProband().isPerson()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_LETTER_NOT_FOR_ANIMAL_ENTRIES); } ProbandAddressOutVO addressVO = probandAddressDao.toProbandAddressOutVO(address); ProbandLetterPDFPainter painter = ServiceUtil.createProbandLetterPDFPainter(addressVO); User user = CoreUtil.getUser(); painter.getPdfVO().setRequestingUser(this.getUserDao().toUserOutVO(user)); (new PDFImprinter(painter, painter)).render(); ProbandLetterPDFVO result = painter.getPdfVO(); logSystemMessage(address.getProband(), addressVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), user, SystemMessageCodes.PROBAND_ADDRESS_PROBAND_LETTER_PDF_RENDERED, result, null, this.getJournalEntryDao()); return 
result; } @Override protected ProbandLetterPDFVO handleRenderProbandLettersPDF( AuthenticationVO auth, Long probandId) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao); if (!proband.isPerson()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_LETTER_NOT_FOR_ANIMAL_ENTRIES); } ArrayList<ProbandOutVO> probandVOs = new ArrayList<ProbandOutVO>(); ProbandOutVO probandVO = probandDao.toProbandOutVO(proband); probandVOs.add(probandVO); ProbandLetterPDFPainter painter = ServiceUtil.createProbandLetterPDFPainter(probandVOs, this.getProbandAddressDao()); User user = CoreUtil.getUser(); painter.getPdfVO().setRequestingUser(this.getUserDao().toUserOutVO(user)); (new PDFImprinter(painter, painter)).render(); ProbandLetterPDFVO result = painter.getPdfVO(); ServiceUtil.logSystemMessage(proband, probandVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), user, SystemMessageCodes.PROBAND_LETTER_PDF_RENDERED, result, null, this.getJournalEntryDao()); return result; } @Override protected ProbandOutVO handleResetAutoDeleteDeadline( AuthenticationVO auth, Long probandId, Long version) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao); ProbandOutVO original = probandDao.toProbandOutVO(proband); ServiceUtil.checkProbandLocked(proband); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); CoreUtil.modifyVersion(proband, version.longValue(), now, user); ServiceUtil.resetAutoDeleteDeadline(proband, now); probandDao.update(proband); ServiceUtil.notifyExpiringProbandAutoDelete(proband, now, this.getNotificationDao()); ProbandOutVO result = probandDao.toProbandOutVO(proband); ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_AUTO_DELETE_DEADLINE_RESET, result, original, this.getJournalEntryDao()); return result; } @Override 
// Flips the "paid" flag on every money transfer of a person proband that currently
// has the opposite flag, optionally restricted to one trial; each change is journaled.
protected Collection<MoneyTransferOutVO> handleSetAllMoneyTransfersPaid(
		AuthenticationVO auth, Long probandId, Long trialId, boolean paid) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao, LockMode.PESSIMISTIC_WRITE);
	if (!probandDao.toProbandOutVO(proband).isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
	}
	if (!proband.isPerson()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PROBAND_NOT_PERSON);
	}
	ServiceUtil.checkProbandLocked(proband);
	TrialDao trialDao = this.getTrialDao();
	Trial trial = null;
	if (trialId != null) {
		trial = CheckIDUtil.checkTrialId(trialId, trialDao);
	}
	JournalEntryDao journalEntryDao = this.getJournalEntryDao();
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
	// fetch only transfers whose current flag is the opposite (!paid) of the requested state
	Collection<MoneyTransfer> moneyTransfers = moneyTransferDao.findByProbandTrialMethodCostTypePaidPerson(null, trial == null ? null : trial.getId(), null,
			proband.getId(), null, null, !paid, null, null);
	if (moneyTransfers.size() == 0) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PAID_NOT_CHANGED);
	}
	ArrayList<MoneyTransferOutVO> results = new ArrayList<MoneyTransferOutVO>(moneyTransfers.size());
	Iterator<MoneyTransfer> moneyTransfersIt = moneyTransfers.iterator();
	while (moneyTransfersIt.hasNext()) {
		MoneyTransfer moneyTransfer = moneyTransfersIt.next();
		MoneyTransferOutVO original = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
		if (!original.isDecrypted()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MONEY_TRANSFER);
		}
		Trial moneyTransferTrial = moneyTransfer.getTrial();
		if (moneyTransferTrial != null) {
			ServiceUtil.checkTrialLocked(moneyTransferTrial);
		}
		moneyTransfer.setPaid(paid);
		// in-place update: same entity passed as original and modified
		CoreUtil.modifyVersion(moneyTransfer, moneyTransfer, now, user);
		moneyTransferDao.update(moneyTransfer);
		MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
		if (moneyTransferTrial != null) {
			logSystemMessage(moneyTransferTrial, result.getProband(), now, user,
					paid ? SystemMessageCodes.MONEY_TRANSFER_PAID_SET : SystemMessageCodes.MONEY_TRANSFER_PAID_UNSET, result, original, journalEntryDao);
		}
		ServiceUtil.logSystemMessage(moneyTransfer.getProband(), original.getProband(), now, user,
				paid ? SystemMessageCodes.MONEY_TRANSFER_PAID_SET : SystemMessageCodes.MONEY_TRANSFER_PAID_UNSET, result, original, journalEntryDao);
		results.add(result);
	}
	return results;
}

// Stores a batch of inquiry values for one proband/one trial. Per-value errors are
// collected into a map keyed by inquiry id; the first exception is rethrown carrying
// that map as data, so the UI can show all failures at once.
@Override
protected InquiryValuesOutVO handleSetInquiryValues(
		AuthenticationVO auth, Set<InquiryValueInVO> inquiryValuesIn, boolean force) throws Exception {
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	InquiryValuesOutVO result = new InquiryValuesOutVO();
	ServiceException firstException = null;
	HashMap<Long, String> errorMessagesMap = new HashMap<Long, String>();
	Proband proband = null;
	if (inquiryValuesIn != null && inquiryValuesIn.size() > 0) {
		Trial trial = null;
		ArrayList<InquiryValueOutVO> inquiryValues = new ArrayList<InquiryValueOutVO>(inquiryValuesIn.size());
		ArrayList<InquiryValueJsonVO> jsInquiryValues = null;
		// JSON twins are produced only when browser-side field calculation is enabled
		if (Settings.getBoolean(SettingCodes.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION, Bundle.SETTINGS,
				DefaultSettings.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION)) {
			jsInquiryValues = new ArrayList<InquiryValueJsonVO>(inquiryValuesIn.size());
		}
		Iterator<InquiryValueInVO> inquiryValuesInIt = inquiryValuesIn.iterator();
		while (inquiryValuesInIt.hasNext()) {
			InquiryValueInVO inquiryValueIn = inquiryValuesInIt.next();
			Inquiry inquiry = CheckIDUtil.checkInquiryId(inquiryValueIn.getInquiryId(), this.getInquiryDao());
			// trial and proband are pinned by the first value; later values must match
			if (trial == null) {
				trial = inquiry.getTrial();
				ServiceUtil.checkTrialLocked(trial);
				if (!trial.getStatus().isInquiryValueInputEnabled()) {
					throw L10nUtil.initServiceException(ServiceExceptionCodes.INQUIRY_VALUE_INPUT_DISABLED_FOR_TRIAL,
							CommonUtil.trialOutVOToString(this.getTrialDao().toTrialOutVO(trial)));
				}
			} else if (!trial.equals(inquiry.getTrial())) {
				throw L10nUtil.initServiceException(ServiceExceptionCodes.INQUIRY_VALUES_FOR_DIFFERENT_TRIALS);
			}
			if (proband == null) {
				proband = CheckIDUtil.checkProbandId(inquiryValueIn.getProbandId(), this.getProbandDao(), LockMode.PESSIMISTIC_WRITE);
				ServiceUtil.checkProbandLocked(proband);
			} else if (!proband.getId().equals(inquiryValueIn.getProbandId())) {
				throw L10nUtil.initServiceException(ServiceExceptionCodes.INQUIRY_VALUES_FOR_DIFFERENT_PROBANDS);
			}
			try {
				addUpdateInquiryValue(inquiryValueIn, proband, inquiry, now, user, force,
						Settings.getBoolean(SettingCodes.LOG_INQUIRY_VALUE_TRIAL, Bundle.SETTINGS, DefaultSettings.LOG_INQUIRY_VALUE_TRIAL),
						Settings.getBoolean(SettingCodes.LOG_INQUIRY_VALUE_PROBAND, Bundle.SETTINGS, DefaultSettings.LOG_INQUIRY_VALUE_PROBAND),
						inquiryValues, jsInquiryValues);
			} catch (ServiceException e) {
				if (firstException == null) {
					firstException = e;
				}
				errorMessagesMap.put(inquiry.getId(), e.getMessage());
			}
		}
		if (firstException != null) {
			firstException.setData(errorMessagesMap);
			throw firstException;
		}
		Collections.sort(inquiryValues, new InquiryValueOutVOComparator());
		result.setPageValues(inquiryValues);
		if (jsInquiryValues != null) {
			result.setJsValues(jsInquiryValues);
		}
	}
	return result;
}

// Toggles the "paid" flag on a single money transfer (person probands only).
@Override
protected MoneyTransferOutVO handleSetMoneyTransferPaid(
		AuthenticationVO auth, Long moneyTransferId, Long version, boolean paid) throws Exception {
	MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
	MoneyTransfer moneyTransfer = CheckIDUtil.checkMoneyTransferId(moneyTransferId, moneyTransferDao);
	MoneyTransferOutVO original = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MONEY_TRANSFER);
	}
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = moneyTransfer.getProband();
	if (!proband.isPerson()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PROBAND_NOT_PERSON);
	}
	probandDao.lock(proband, LockMode.PESSIMISTIC_WRITE);
	if (!probandDao.toProbandOutVO(proband).isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
	}
	ServiceUtil.checkProbandLocked(proband);
	Trial trial =
moneyTransfer.getTrial(); if (trial != null) { ServiceUtil.checkTrialLocked(trial); } if (paid == original.getPaid()) { // unboxed, ok throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PAID_NOT_CHANGED); } Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); CoreUtil.modifyVersion(moneyTransfer, version.longValue(), now, user); moneyTransfer.setPaid(paid); moneyTransferDao.update(moneyTransfer); MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer); JournalEntryDao journalEntryDao = this.getJournalEntryDao(); if (trial != null) { logSystemMessage(trial, result.getProband(), now, user, paid ? SystemMessageCodes.MONEY_TRANSFER_PAID_SET : SystemMessageCodes.MONEY_TRANSFER_PAID_UNSET, result, original, journalEntryDao); } ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, paid ? SystemMessageCodes.MONEY_TRANSFER_PAID_SET : SystemMessageCodes.MONEY_TRANSFER_PAID_UNSET, result, original, journalEntryDao); return result; } @Override protected ProbandAddressOutVO handleSetProbandAddressWireTransfer( AuthenticationVO auth, Long probandAddressId, Long version) throws Exception { ProbandAddressDao addressDao = this.getProbandAddressDao(); ProbandAddress address = CheckIDUtil.checkProbandAddressId(probandAddressId, addressDao); Proband proband = address.getProband(); this.getProbandDao().lock(proband, LockMode.PESSIMISTIC_WRITE); ProbandAddressOutVO original = addressDao.toProbandAddressOutVO(address); if (!original.isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_ADDRESS); } ServiceUtil.checkProbandLocked(proband); if (address.isWireTransfer()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_ADDRESS_WIRE_TRANSFER_NOT_CHANGED); } JournalEntryDao journalEntryDao = this.getJournalEntryDao(); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); Iterator<ProbandAddress> 
addressesIt = addressDao.findByProband(proband.getId(), null, null, true, null).iterator(); while (addressesIt.hasNext()) { ProbandAddress oldWireTransferAddress = addressesIt.next(); ProbandAddressOutVO oldWireTransferAddressOriginal = addressDao.toProbandAddressOutVO(address); oldWireTransferAddress.setWireTransfer(false); CoreUtil.modifyVersion(oldWireTransferAddress, oldWireTransferAddress, now, user); addressDao.update(oldWireTransferAddress); ProbandAddressOutVO oldWireTransferAddressResult = addressDao.toProbandAddressOutVO(address); ServiceUtil.logSystemMessage(oldWireTransferAddress.getProband(), oldWireTransferAddressOriginal.getProband(), now, user, SystemMessageCodes.PROBAND_ADDRESS_WIRE_TRANSFER_UNSET, oldWireTransferAddressResult, oldWireTransferAddressOriginal, journalEntryDao); } address.setWireTransfer(true); CoreUtil.modifyVersion(address, version.longValue(), now, user); addressDao.update(address); ProbandAddressOutVO result = addressDao.toProbandAddressOutVO(address); ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROBAND_ADDRESS_WIRE_TRANSFER_SET, result, original, journalEntryDao); return result; } @Override protected ProbandImageOutVO handleSetProbandImage(AuthenticationVO auth, ProbandImageInVO probandImage) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband originalProband = CheckIDUtil.checkProbandId(probandImage.getId(), probandDao); ProbandImageOutVO original = probandDao.toProbandImageOutVO(originalProband); if (!original.isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } ServiceUtil.checkProbandLocked(originalProband); checkProbandImageInput(probandImage); boolean hasImage = original.getHasImage(); boolean cleared = probandImage.getDatas() == null || probandImage.getDatas().length == 0; probandDao.evict(originalProband); Proband proband = probandDao.probandImageInVOToEntity(probandImage); Timestamp now = new 
Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalProband, proband, now, user);
	probandDao.update(proband);
	ProbandImageOutVO result = probandDao.toProbandImageOutVO(proband);
	ServiceUtil.logSystemMessage(proband, probandDao.toProbandOutVO(proband), now, user,
			cleared ? SystemMessageCodes.PROBAND_IMAGE_CLEARED : hasImage ? SystemMessageCodes.PROBAND_IMAGE_UPDATED : SystemMessageCodes.PROBAND_IMAGE_CREATED,
			result, original, this.getJournalEntryDao());
	return result;
}

// Updates a bank account; the owning proband must not change.
@Override
protected BankAccountOutVO handleUpdateBankAccount(
		AuthenticationVO auth, BankAccountInVO modifiedBankAccount) throws Exception {
	BankAccountDao bankAccountDao = this.getBankAccountDao();
	BankAccount originalBankAccount = CheckIDUtil.checkBankAccountId(modifiedBankAccount.getId(), bankAccountDao);
	BankAccountOutVO original = bankAccountDao.toBankAccountOutVO(originalBankAccount);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_BANK_ACCOUNT);
	}
	checkBankAccountInput(modifiedBankAccount);
	if (!modifiedBankAccount.getProbandId().equals(originalBankAccount.getProband().getId())) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_ACCOUNT_PROBAND_CHANGED);
	}
	// evict before re-materializing from the in-VO so the entity reflects the new state
	bankAccountDao.evict(originalBankAccount);
	BankAccount bankAccount = bankAccountDao.bankAccountInVOToEntity(modifiedBankAccount);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalBankAccount, bankAccount, now, user);
	bankAccountDao.update(bankAccount);
	BankAccountOutVO result = bankAccountDao.toBankAccountOutVO(bankAccount);
	ServiceUtil.logSystemMessage(bankAccount.getProband(), result.getProband(), now, user, SystemMessageCodes.BANK_ACCOUNT_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Updates a diagnosis record of a proband.
@Override
protected DiagnosisOutVO handleUpdateDiagnosis(AuthenticationVO auth, DiagnosisInVO modifiedDiagnosis) throws Exception {
	DiagnosisDao diagnosisDao = this.getDiagnosisDao();
	Diagnosis originalDiagnosis = CheckIDUtil.checkDiagnosisId(modifiedDiagnosis.getId(), diagnosisDao);
	DiagnosisOutVO original = diagnosisDao.toDiagnosisOutVO(originalDiagnosis);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_DIAGNOSIS);
	}
	checkDiagnosisInput(modifiedDiagnosis);
	diagnosisDao.evict(originalDiagnosis);
	Diagnosis diagnosis = diagnosisDao.diagnosisInVOToEntity(modifiedDiagnosis);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalDiagnosis, diagnosis, now, user);
	diagnosisDao.update(diagnosis);
	DiagnosisOutVO result = diagnosisDao.toDiagnosisOutVO(diagnosis);
	ServiceUtil.logSystemMessage(diagnosis.getProband(), result.getProband(), now, user, SystemMessageCodes.DIAGNOSIS_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Updates a medication record; the owning proband must not change.
@Override
protected MedicationOutVO handleUpdateMedication(AuthenticationVO auth, MedicationInVO modifiedMedication) throws Exception {
	MedicationDao medicationDao = this.getMedicationDao();
	Medication originalMedication = CheckIDUtil.checkMedicationId(modifiedMedication.getId(), medicationDao);
	MedicationOutVO original = medicationDao.toMedicationOutVO(originalMedication);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MEDICATION);
	}
	checkMedicationInput(modifiedMedication);
	if (!modifiedMedication.getProbandId().equals(originalMedication.getProband().getId())) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_PROBAND_CHANGED);
	}
	medicationDao.evict(originalMedication);
	Medication medication = medicationDao.medicationInVOToEntity(modifiedMedication);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalMedication, medication, now, user);
	medicationDao.update(medication);
	MedicationOutVO result = medicationDao.toMedicationOutVO(medication);
	ServiceUtil.logSystemMessage(medication.getProband(), result.getProband(), now, user, SystemMessageCodes.MEDICATION_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Updates a money transfer; the owning proband must not change. Journaled on
// both the trial (if any) and the proband record.
@Override
protected MoneyTransferOutVO handleUpdateMoneyTransfer(
		AuthenticationVO auth, MoneyTransferInVO modifiedMoneyTransfer, Long maxAllowedCostTypes) throws Exception {
	MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
	MoneyTransfer originalMoneyTransfer = CheckIDUtil.checkMoneyTransferId(modifiedMoneyTransfer.getId(), moneyTransferDao);
	MoneyTransferOutVO original = moneyTransferDao.toMoneyTransferOutVO(originalMoneyTransfer);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MONEY_TRANSFER);
	}
	checkMoneyTransferInput(modifiedMoneyTransfer, maxAllowedCostTypes);
	if (!modifiedMoneyTransfer.getProbandId().equals(originalMoneyTransfer.getProband().getId())) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PROBAND_CHANGED);
	}
	moneyTransferDao.evict(originalMoneyTransfer);
	MoneyTransfer moneyTransfer = moneyTransferDao.moneyTransferInVOToEntity(modifiedMoneyTransfer);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalMoneyTransfer, moneyTransfer, now, user);
	moneyTransferDao.update(moneyTransfer);
	Trial trial = moneyTransfer.getTrial();
	MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
	if (trial != null) {
		logSystemMessage(trial, result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_UPDATED, result, original, this.getJournalEntryDao());
	}
	ServiceUtil.logSystemMessage(moneyTransfer.getProband(), result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Moves the proband's privacy consent status along the configured state machine.
@Override
protected ProbandOutVO handleUpdatePrivacyConsentStatus(
		AuthenticationVO auth, Long
		probandId, Long version, Long privacyConsentStatusTypeId) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
	ProbandOutVO original = probandDao.toProbandOutVO(proband);
	PrivacyConsentStatusTypeDao privacyConsentStatusTypeDao = this.getPrivacyConsentStatusTypeDao();
	PrivacyConsentStatusType state = CheckIDUtil.checkPrivacyConsentStatusTypeId(privacyConsentStatusTypeId, privacyConsentStatusTypeDao);
	ServiceUtil.checkProbandLocked(proband);
	// the new state must be a permitted transition from the current consent status
	boolean validState = false;
	Iterator<PrivacyConsentStatusType> statesIt = privacyConsentStatusTypeDao.findTransitions(proband.getPrivacyConsentStatus().getId()).iterator();
	while (statesIt.hasNext()) {
		if (state.equals(statesIt.next())) {
			validState = true;
			break;
		}
	}
	if (!validState) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.INVALID_NEW_PRIVACY_CONSENT_STATUS_TYPE,
				L10nUtil.getPrivacyConsentStatusTypeName(Locales.USER, state.getNameL10nKey()));
	}
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(proband, version.longValue(), now, user);
	proband.setPrivacyConsentStatus(state);
	probandDao.update(proband);
	ServiceUtil.notifyExpiringProbandAutoDelete(proband, now, this.getNotificationDao());
	ProbandOutVO result = probandDao.toProbandOutVO(proband);
	ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PRIVACY_CONSENT_STATUS_TYPE_UPDATED, result, original, this.getJournalEntryDao());
	// NOTE(review): re-converts instead of returning the 'result' built above — redundant conversion, same outcome
	return probandDao.toProbandOutVO(proband);
}

// Updates a proband. Blinded entries may only be edited within the user's own
// department and cannot change department; switching the category into
// privacy-consent control resets the auto-delete deadline and the consent status.
@Override
protected ProbandOutVO handleUpdateProband(AuthenticationVO auth, ProbandInVO modifiedProband, Integer maxInstances, Integer maxParentsDepth,
		Integer maxChildrenDepth) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	User user = CoreUtil.getUser();
	this.getUserDao().lock(user, LockMode.PESSIMISTIC_WRITE);
	Proband originalProband = CheckIDUtil.checkProbandId(modifiedProband.getId(), probandDao, LockMode.PESSIMISTIC_WRITE);
	ProbandOutVO original = probandDao.toProbandOutVO(originalProband, maxInstances, maxParentsDepth, maxChildrenDepth);
	if (modifiedProband.getBlinded()) {
		if (!user.getDepartment().getId().equals(modifiedProband.getDepartmentId())) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_NOT_EQUAL_TO_USER_DEPARTMENT);
		}
		if (!modifiedProband.getDepartmentId().equals(originalProband.getDepartment().getId())) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_CHANGED);
		}
	} else {
		if (!original.isDecrypted()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
		}
	}
	checkProbandInput(modifiedProband);
	if (originalProband.isPerson() != modifiedProband.isPerson()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_PERSON_FLAG_CHANGED);
	}
	boolean originalPrivacyConsentControl = originalProband.getCategory().isPrivacyConsentControl();
	probandDao.evict(originalProband);
	Proband proband = probandDao.probandInVOToEntity(modifiedProband);
	// guards against cycles in the parent/child graph
	checkProbandLoop(proband);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	CoreUtil.modifyVersion(originalProband, proband, now, user);
	if (!originalPrivacyConsentControl && proband.getCategory().isPrivacyConsentControl()) {
		ServiceUtil.resetAutoDeleteDeadline(proband, now);
		proband.setPrivacyConsentStatus(this.getPrivacyConsentStatusTypeDao().findInitialStates().iterator().next());
	}
	probandDao.update(proband);
	ServiceUtil.notifyExpiringProbandAutoDelete(proband, now, this.getNotificationDao());
	ProbandOutVO result = probandDao.toProbandOutVO(proband, maxInstances, maxParentsDepth, maxChildrenDepth);
	JournalEntryDao journalEntryDao = this.getJournalEntryDao();
	// journal on the proband itself, its physician (if any) and each parent
	ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_UPDATED, result, original, journalEntryDao);
	Staff physician = proband.getPhysician();
	if (physician != null) {
		ServiceUtil.logSystemMessage(physician, result, now, user, SystemMessageCodes.PROBAND_UPDATED, result, original, journalEntryDao);
	}
	Iterator<ProbandOutVO> parentsIt = original.getParents().iterator();
	while (parentsIt.hasNext()) {
		ProbandOutVO parent = parentsIt.next();
		ServiceUtil.logSystemMessage(probandDao.load(parent.getId()), result, now, user, SystemMessageCodes.PROBAND_UPDATED, result, original, journalEntryDao);
	}
	return result;
}

// Updates a proband address.
@Override
protected ProbandAddressOutVO handleUpdateProbandAddress(
		AuthenticationVO auth, ProbandAddressInVO modifiedProbandAddress) throws Exception {
	ProbandAddressDao addressDao = this.getProbandAddressDao();
	ProbandAddress originalAddress = CheckIDUtil.checkProbandAddressId(modifiedProbandAddress.getId(), addressDao);
	ProbandAddressOutVO original = addressDao.toProbandAddressOutVO(originalAddress);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_ADDRESS);
	}
	checkProbandAddressInput(modifiedProbandAddress);
	addressDao.evict(originalAddress);
	ProbandAddress address = addressDao.probandAddressInVOToEntity(modifiedProbandAddress);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalAddress, address, now, user);
	addressDao.update(address);
	ProbandAddressOutVO result = addressDao.toProbandAddressOutVO(address);
	ServiceUtil.logSystemMessage(address.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_ADDRESS_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Changes the proband's category and stores a comment (encrypted for persons).
@Override
protected ProbandOutVO handleUpdateProbandCategory(
		AuthenticationVO auth, Long probandId, Long version, Long categoryId, String comment) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
	ProbandOutVO original = probandDao.toProbandOutVO(proband);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
	}
	ProbandCategory category = CheckIDUtil.checkProbandCategoryId(categoryId, this.getProbandCategoryDao());
	// the category must match the entry kind (person vs. animal)
	if (proband.isPerson()) {
		if (!category.isPerson()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CATEGORY_NOT_FOR_PERSON_ENTRIES,
					L10nUtil.getProbandCategoryName(Locales.USER, category.getNameL10nKey()));
		}
	} else {
		if (!category.isAnimal()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CATEGORY_NOT_FOR_ANIMAL_ENTRIES,
					L10nUtil.getProbandCategoryName(Locales.USER, category.getNameL10nKey()));
		}
	}
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(proband, version.longValue(), now, user);
	proband.setCategory(category);
	if (proband.isPerson()) {
		ProbandContactParticulars personParticulars = proband.getPersonParticulars();
		if (personParticulars != null) {
			// person comments are stored encrypted, plus a hash to keep them searchable
			CipherText cipherText = CryptoUtil.encryptValue(comment);
			personParticulars.setCommentIv(cipherText.getIv());
			personParticulars.setEncryptedComment(cipherText.getCipherText());
			personParticulars.setCommentHash(CryptoUtil.hashForSearch(comment));
		}
	} else {
		AnimalContactParticulars animalParticulars = proband.getAnimalParticulars();
		if (animalParticulars != null) {
			animalParticulars.setComment(comment);
		}
	}
	probandDao.update(proband);
	ProbandOutVO result = probandDao.toProbandOutVO(proband);
	ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_CATEGORY_UPDATED, result, original, this.getJournalEntryDao());
	// NOTE(review): re-converts instead of returning the 'result' built above — redundant conversion, same outcome
	return probandDao.toProbandOutVO(proband);
}

// Updates a proband contact detail value (phone, e-mail, ...).
@Override
protected ProbandContactDetailValueOutVO handleUpdateProbandContactDetailValue(
		AuthenticationVO auth, ProbandContactDetailValueInVO modifiedProbandContactDetailValue) throws Exception {
	ProbandContactDetailValueDao contactValueDao = this.getProbandContactDetailValueDao();
	ProbandContactDetailValue originalContactValue = CheckIDUtil.checkProbandContactDetailValueId(modifiedProbandContactDetailValue.getId(), contactValueDao);
	ProbandContactDetailValueOutVO original = contactValueDao.toProbandContactDetailValueOutVO(originalContactValue);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_CONTACT_DETAIL_VALUE);
	}
	checkProbandContactDetailValueInput(modifiedProbandContactDetailValue);
	contactValueDao.evict(originalContactValue);
	ProbandContactDetailValue contactValue = contactValueDao.probandContactDetailValueInVOToEntity(modifiedProbandContactDetailValue);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalContactValue, contactValue, now, user);
	contactValueDao.update(contactValue);
	ProbandContactDetailValueOutVO result = contactValueDao.toProbandContactDetailValueOutVO(contactValue);
	ServiceUtil.logSystemMessage(contactValue.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_CONTACT_DETAIL_VALUE_UPDATED, result,
			original, this.getJournalEntryDao());
	return result;
}

// Updates a proband status entry and refreshes inactivity notifications.
@Override
protected ProbandStatusEntryOutVO handleUpdateProbandStatusEntry(
		AuthenticationVO auth, ProbandStatusEntryInVO modifiedProbandStatusEntry) throws Exception {
	ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao();
	ProbandStatusEntry originalStatusEntry = CheckIDUtil.checkProbandStatusEntryId(modifiedProbandStatusEntry.getId(), statusEntryDao);
	ProbandStatusEntryOutVO original = statusEntryDao.toProbandStatusEntryOutVO(originalStatusEntry);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_STATUS_ENTRY);
	}
	checkProbandStatusEntryInput(modifiedProbandStatusEntry);
	statusEntryDao.evict(originalStatusEntry);
	ProbandStatusEntry statusEntry = statusEntryDao.probandStatusEntryInVOToEntity(modifiedProbandStatusEntry);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalStatusEntry, statusEntry, now, user);
	statusEntryDao.update(statusEntry);
	notifyProbandInactive(statusEntry, now);
	ProbandStatusEntryOutVO result = statusEntryDao.toProbandStatusEntryOutVO(statusEntry);
	ServiceUtil.logSystemMessage(statusEntry.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_STATUS_ENTRY_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Updates a proband tag value.
@Override
protected ProbandTagValueOutVO handleUpdateProbandTagValue(
		AuthenticationVO auth, ProbandTagValueInVO modifiedProbandTagValue) throws Exception {
	ProbandTagValueDao tagValueDao = this.getProbandTagValueDao();
	ProbandTagValue originalTagValue = CheckIDUtil.checkProbandTagValueId(modifiedProbandTagValue.getId(), tagValueDao);
	ProbandTagValueOutVO original = tagValueDao.toProbandTagValueOutVO(originalTagValue);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_TAG_VALUE);
	}
	checkProbandTagValueInput(modifiedProbandTagValue);
	tagValueDao.evict(originalTagValue);
	ProbandTagValue tagValue = tagValueDao.probandTagValueInVOToEntity(modifiedProbandTagValue);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalTagValue, tagValue, now, user);
	tagValueDao.update(tagValue);
	ProbandTagValueOutVO result = tagValueDao.toProbandTagValueOutVO(tagValue);
	ServiceUtil.logSystemMessage(tagValue.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_TAG_VALUE_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Updates a procedure record of a proband.
@Override
protected ProcedureOutVO handleUpdateProcedure(AuthenticationVO auth, ProcedureInVO modifiedProcedure) throws Exception {
	ProcedureDao procedureDao = this.getProcedureDao();
	Procedure originalProcedure = CheckIDUtil.checkProcedureId(modifiedProcedure.getId(), procedureDao);
	ProcedureOutVO original = procedureDao.toProcedureOutVO(originalProcedure);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROCEDURE);
	}
	checkProcedureInput(modifiedProcedure);
	procedureDao.evict(originalProcedure);
	Procedure procedure = procedureDao.procedureInVOToEntity(modifiedProcedure);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalProcedure, procedure, now, user);
	procedureDao.update(procedure);
	ProcedureOutVO result = procedureDao.toProcedureOutVO(procedure);
	ServiceUtil.logSystemMessage(procedure.getProband(), result.getProband(), now, user, SystemMessageCodes.PROCEDURE_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Re-issues notifications after a status-entry change: cancels the entry's existing
// notifications, then for inactivity entries notifies about the entry itself (if its
// interval contains 'now') and about visit schedule items overlapping the interval.
// (closing brace of this method is on the next chunk line)
private void notifyProbandInactive(ProbandStatusEntry statusEntry, Date now) throws Exception {
	NotificationDao notificationDao = this.getNotificationDao();
	ServiceUtil.cancelNotifications(statusEntry.getNotifications(), notificationDao, null); // clears inventory_active AND inventory inactive booking notifications
	if (!statusEntry.getType().isProbandActive()) {
		if ((new DateInterval(statusEntry.getStart(), statusEntry.getStop())).contains(now)) {
			notificationDao.addNotification(statusEntry, now, null);
		}
		if (!(new DateInterval(statusEntry.getStart(), statusEntry.getStop())).isOver(now)) {
			VisitScheduleItemDao visitScheduleItemDao = this.getVisitScheduleItemDao();
			Proband proband = statusEntry.getProband();
			Iterator<ProbandListEntry> trialParticipationsIt = proband.getTrialParticipations().iterator();
			while (trialParticipationsIt.hasNext()) {
				ProbandListEntry probandListEntry = trialParticipationsIt.next();
				ProbandGroup probandGroup = probandListEntry.getGroup();
				if (probandGroup != null) {
					Iterator<VisitScheduleItem> it = visitScheduleItemDao
							.findByInterval(probandListEntry.getTrial().getId(), probandGroup.getId(), proband.getId(), statusEntry.getStart(),
									statusEntry.getStop())
							.iterator();
					while (it.hasNext()) {
						notificationDao.addNotification(it.next(), proband, statusEntry, now, null);
					}
				}
			}
		}
	}
} @Override protected ProbandOutVO handleUpdateProbandDepartment(AuthenticationVO auth, Long probandId, Long newDepartmentId, String plainNewDepartmentPassword, String plainOldDepartmentPassword) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao, LockMode.PESSIMISTIC_WRITE); Department newDepartment = CheckIDUtil.checkDepartmentId(newDepartmentId, this.getDepartmentDao()); if (plainNewDepartmentPassword == null) { plainNewDepartmentPassword = CoreUtil.getUserContext().getPlainDepartmentPassword(); } if (plainOldDepartmentPassword == null) { plainOldDepartmentPassword = CoreUtil.getUserContext().getPlainDepartmentPassword(); } Department oldDepartment = proband.getDepartment(); if (!oldDepartment.equals(newDepartment)) { if (!CryptoUtil.checkDepartmentPassword(newDepartment, plainNewDepartmentPassword)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.DEPARTMENT_PASSWORD_WRONG); } if (!CryptoUtil.checkDepartmentPassword(oldDepartment, plainOldDepartmentPassword)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.OLD_DEPARTMENT_PASSWORD_WRONG); } Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); SecretKey newDepartmentKey = ReEncrypter.getDepartmenKey(newDepartment, plainNewDepartmentPassword); SecretKey oldDepartmentKey = ReEncrypter.getDepartmenKey(oldDepartment, plainOldDepartmentPassword); ProbandOutVO original = probandDao.toProbandOutVO(proband); probandDao.reEncrypt(proband, oldDepartmentKey, newDepartmentKey); proband.setDepartment(newDepartment); CoreUtil.modifyVersion(proband, proband.getVersion(), now, user); probandDao.update(proband); ProbandOutVO result = probandDao.toProbandOutVO(proband); ProbandTagValueDao probandTagValueDao = this.getProbandTagValueDao(); Iterator<ProbandTagValue> tagValuesIt = proband.getTagValues().iterator(); while (tagValuesIt.hasNext()) { ProbandTagValue tagValue = tagValuesIt.next(); 
probandTagValueDao.reEncrypt(tagValue, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(tagValue, tagValue.getVersion(), now, user); probandTagValueDao.update(tagValue); } ProbandContactDetailValueDao probandContactDetailValueDao = this.getProbandContactDetailValueDao(); Iterator<ProbandContactDetailValue> contactDetailValuesIt = proband.getContactDetailValues().iterator(); while (contactDetailValuesIt.hasNext()) { ProbandContactDetailValue contactDetailValue = contactDetailValuesIt.next(); probandContactDetailValueDao.reEncrypt(contactDetailValue, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(contactDetailValue, contactDetailValue.getVersion(), now, user); probandContactDetailValueDao.update(contactDetailValue); } ProbandAddressDao probandAddressDao = this.getProbandAddressDao(); Iterator<ProbandAddress> addressesIt = proband.getAddresses().iterator(); while (addressesIt.hasNext()) { ProbandAddress address = addressesIt.next(); probandAddressDao.reEncrypt(address, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(address, address.getVersion(), now, user); probandAddressDao.update(address); } ProbandStatusEntryDao probandStatusEntryDao = this.getProbandStatusEntryDao(); Iterator<ProbandStatusEntry> statusEntriesIt = proband.getStatusEntries().iterator(); while (statusEntriesIt.hasNext()) { ProbandStatusEntry statusEntry = statusEntriesIt.next(); probandStatusEntryDao.reEncrypt(statusEntry, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(statusEntry, statusEntry.getVersion(), now, user); probandStatusEntryDao.update(statusEntry); } MedicationDao medicationDao = this.getMedicationDao(); Iterator<Medication> medicationsIt = proband.getMedications().iterator(); while (medicationsIt.hasNext()) { Medication medication = medicationsIt.next(); medicationDao.reEncrypt(medication, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(medication, medication.getVersion(), now, user); medicationDao.update(medication); } 
DiagnosisDao diagnosisDao = this.getDiagnosisDao(); Iterator<Diagnosis> diagnosesIt = proband.getDiagnoses().iterator(); while (diagnosesIt.hasNext()) { Diagnosis diagnosis = diagnosesIt.next(); diagnosisDao.reEncrypt(diagnosis, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(diagnosis, diagnosis.getVersion(), now, user); diagnosisDao.update(diagnosis); } ProcedureDao procedureDao = this.getProcedureDao(); Iterator<Procedure> proceduresIt = proband.getProcedures().iterator(); while (proceduresIt.hasNext()) { Procedure procedure = proceduresIt.next(); procedureDao.reEncrypt(procedure, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(procedure, procedure.getVersion(), now, user); procedureDao.update(procedure); } MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao(); Iterator<MoneyTransfer> moneyTransfersIt = proband.getMoneyTransfers().iterator(); while (moneyTransfersIt.hasNext()) { MoneyTransfer moneyTransfer = moneyTransfersIt.next(); moneyTransferDao.reEncrypt(moneyTransfer, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(moneyTransfer, moneyTransfer.getVersion(), now, user); moneyTransferDao.update(moneyTransfer); } BankAccountDao bankAccountDao = this.getBankAccountDao(); Iterator<BankAccount> bankAccountIt = proband.getBankAccounts().iterator(); while (bankAccountIt.hasNext()) { BankAccount bankAccount = bankAccountIt.next(); bankAccountDao.reEncrypt(bankAccount, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(bankAccount, bankAccount.getVersion(), now, user); bankAccountDao.update(bankAccount); } //no re-encryption for proband list status entries, as those are encryted by the creating user // Iterator<ProbandListEntry> trialParticipationsIt = proband.getTrialParticipations().iterator(); // while (trialParticipationsIt.hasNext()) { // Iterator<ProbandListStatusEntry> probandListStatusEntriesIt = trialParticipationsIt.next().getStatusEntries().iterator(); // while (probandListStatusEntriesIt.hasNext()) { 
// ProbandListStatusEntry probandListStatusEntry = probandListStatusEntriesIt.next(); // probandListStatusEntryDao.reEncrypt(probandListStatusEntry, oldDepartmentKey, newDepartmentKey); // CoreUtil.modifyVersion(probandListStatusEntry,probandListStatusEntry.getVersion(), now, user); // probandListStatusEntryDao.update(probandListStatusEntry); // } // } MassMailRecipientDao massMailRecipientDao = this.getMassMailRecipientDao(); Iterator<MassMailRecipient> massMailReceiptsIt = proband.getMassMailReceipts().iterator(); while (massMailReceiptsIt.hasNext()) { MassMailRecipient recipient = massMailReceiptsIt.next(); massMailRecipientDao.reEncrypt(recipient, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(recipient, recipient.getVersion(), now, user); massMailRecipientDao.update(recipient); } //no re-encryption for journal entries, as those are encryted by the creating user //JournalEntryDao journalEntryDao = this.getJournalEntryDao(); //Iterator<JournalEntry> journalEntriesIt = proband.getJournalEntries().iterator(); //while (journalEntriesIt.hasNext()) { // JournalEntry journalEntry = journalEntriesIt.next(); // journalEntryDao.reEncrypt(journalEntry, oldDepartmentKey, newDepartmentKey); // CoreUtil.modifyVersion(journalEntry, journalEntry.getVersion(), now, user); // journalEntryDao.update(journalEntry); //} FileDao fileDao = this.getFileDao(); Iterator<File> filesIt = proband.getFiles().iterator(); while (filesIt.hasNext()) { File file = filesIt.next(); fileDao.reEncrypt(file, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(file, file.getVersion(), now, user); fileDao.update(file); } ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_DEPARTMENT_UPDATED, result, original, this.getJournalEntryDao()); return result; } else { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_NOT_CHANGED); } } }
core/src/main/java/org/phoenixctms/ctsms/service/proband/ProbandServiceImpl.java
// license-header java merge-point /** * This is only generated once! It will never be overwritten. * You can (and have to!) safely modify it by hand. * TEMPLATE: SpringServiceImpl.vsl in andromda-spring cartridge * MODEL CLASS: AndroMDAModel::ctsms::org.phoenixctms.ctsms::service::proband::ProbandService * STEREOTYPE: Service */ package org.phoenixctms.ctsms.service.proband; import java.awt.Dimension; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import javax.crypto.SecretKey; import org.hibernate.LockMode; import org.phoenixctms.ctsms.adapt.DiagnosisCollisionFinder; import org.phoenixctms.ctsms.adapt.InquiryValueCollisionFinder; import org.phoenixctms.ctsms.adapt.MaxCostTypesAdapter; import org.phoenixctms.ctsms.adapt.MedicationCollisionFinder; import org.phoenixctms.ctsms.adapt.ProbandAddressTypeTagAdapter; import org.phoenixctms.ctsms.adapt.ProbandContactDetailTypeTagAdapter; import org.phoenixctms.ctsms.adapt.ProbandStatusEntryCollisionFinder; import org.phoenixctms.ctsms.adapt.ProbandTagAdapter; import org.phoenixctms.ctsms.adapt.ProcedureCollisionFinder; import org.phoenixctms.ctsms.compare.InquiryValueOutVOComparator; import org.phoenixctms.ctsms.compare.ProbandStatusEntryIntervalComparator; import org.phoenixctms.ctsms.domain.AlphaId; import org.phoenixctms.ctsms.domain.AnimalContactParticulars; import org.phoenixctms.ctsms.domain.Asp; import org.phoenixctms.ctsms.domain.AspDao; import org.phoenixctms.ctsms.domain.AspSubstance; import org.phoenixctms.ctsms.domain.AspSubstanceDao; import org.phoenixctms.ctsms.domain.BankAccount; import org.phoenixctms.ctsms.domain.BankAccountDao; import org.phoenixctms.ctsms.domain.Department; import org.phoenixctms.ctsms.domain.Diagnosis; import org.phoenixctms.ctsms.domain.DiagnosisDao; import 
org.phoenixctms.ctsms.domain.File; import org.phoenixctms.ctsms.domain.FileDao; import org.phoenixctms.ctsms.domain.InputField; import org.phoenixctms.ctsms.domain.InputFieldDao; import org.phoenixctms.ctsms.domain.InputFieldValue; import org.phoenixctms.ctsms.domain.Inquiry; import org.phoenixctms.ctsms.domain.InquiryDao; import org.phoenixctms.ctsms.domain.InquiryValue; import org.phoenixctms.ctsms.domain.InquiryValueDao; import org.phoenixctms.ctsms.domain.InventoryBookingDao; import org.phoenixctms.ctsms.domain.JournalEntry; import org.phoenixctms.ctsms.domain.JournalEntryDao; import org.phoenixctms.ctsms.domain.MassMailRecipient; import org.phoenixctms.ctsms.domain.MassMailRecipientDao; import org.phoenixctms.ctsms.domain.Medication; import org.phoenixctms.ctsms.domain.MedicationDao; import org.phoenixctms.ctsms.domain.MimeType; import org.phoenixctms.ctsms.domain.MoneyTransfer; import org.phoenixctms.ctsms.domain.MoneyTransferDao; import org.phoenixctms.ctsms.domain.NotificationDao; import org.phoenixctms.ctsms.domain.OpsCode; import org.phoenixctms.ctsms.domain.PrivacyConsentStatusType; import org.phoenixctms.ctsms.domain.PrivacyConsentStatusTypeDao; import org.phoenixctms.ctsms.domain.Proband; import org.phoenixctms.ctsms.domain.ProbandAddress; import org.phoenixctms.ctsms.domain.ProbandAddressDao; import org.phoenixctms.ctsms.domain.ProbandCategory; import org.phoenixctms.ctsms.domain.ProbandContactDetailValue; import org.phoenixctms.ctsms.domain.ProbandContactDetailValueDao; import org.phoenixctms.ctsms.domain.ProbandContactParticulars; import org.phoenixctms.ctsms.domain.ProbandDao; import org.phoenixctms.ctsms.domain.ProbandGroup; import org.phoenixctms.ctsms.domain.ProbandGroupDao; import org.phoenixctms.ctsms.domain.ProbandListEntry; import org.phoenixctms.ctsms.domain.ProbandStatusEntry; import org.phoenixctms.ctsms.domain.ProbandStatusEntryDao; import org.phoenixctms.ctsms.domain.ProbandStatusType; import 
org.phoenixctms.ctsms.domain.ProbandTagValue; import org.phoenixctms.ctsms.domain.ProbandTagValueDao; import org.phoenixctms.ctsms.domain.Procedure; import org.phoenixctms.ctsms.domain.ProcedureDao; import org.phoenixctms.ctsms.domain.Staff; import org.phoenixctms.ctsms.domain.Trial; import org.phoenixctms.ctsms.domain.TrialDao; import org.phoenixctms.ctsms.domain.User; import org.phoenixctms.ctsms.domain.VisitScheduleItem; import org.phoenixctms.ctsms.domain.VisitScheduleItemDao; import org.phoenixctms.ctsms.enumeration.FileModule; import org.phoenixctms.ctsms.enumeration.JournalModule; import org.phoenixctms.ctsms.enumeration.PaymentMethod; import org.phoenixctms.ctsms.enumeration.Sex; import org.phoenixctms.ctsms.enumeration.VariablePeriod; import org.phoenixctms.ctsms.excel.VisitScheduleExcelWriter; import org.phoenixctms.ctsms.exception.ServiceException; import org.phoenixctms.ctsms.pdf.PDFImprinter; import org.phoenixctms.ctsms.pdf.ProbandLetterPDFPainter; import org.phoenixctms.ctsms.security.CipherText; import org.phoenixctms.ctsms.security.CryptoUtil; import org.phoenixctms.ctsms.security.reencrypt.ReEncrypter; import org.phoenixctms.ctsms.util.CheckIDUtil; import org.phoenixctms.ctsms.util.CommonUtil; import org.phoenixctms.ctsms.util.CoreUtil; import org.phoenixctms.ctsms.util.DefaultSettings; import org.phoenixctms.ctsms.util.L10nUtil; import org.phoenixctms.ctsms.util.L10nUtil.Locales; import org.phoenixctms.ctsms.util.ServiceExceptionCodes; import org.phoenixctms.ctsms.util.ServiceUtil; import org.phoenixctms.ctsms.util.SettingCodes; import org.phoenixctms.ctsms.util.Settings; import org.phoenixctms.ctsms.util.Settings.Bundle; import org.phoenixctms.ctsms.util.SystemMessageCodes; import org.phoenixctms.ctsms.util.date.DateCalc; import org.phoenixctms.ctsms.util.date.DateInterval; import org.phoenixctms.ctsms.vo.AuthenticationVO; import org.phoenixctms.ctsms.vo.BankAccountInVO; import org.phoenixctms.ctsms.vo.BankAccountOutVO; import 
org.phoenixctms.ctsms.vo.DiagnosisInVO; import org.phoenixctms.ctsms.vo.DiagnosisOutVO; import org.phoenixctms.ctsms.vo.InquiriesPDFVO; import org.phoenixctms.ctsms.vo.InquiryValueInVO; import org.phoenixctms.ctsms.vo.InquiryValueJsonVO; import org.phoenixctms.ctsms.vo.InquiryValueOutVO; import org.phoenixctms.ctsms.vo.InquiryValuesOutVO; import org.phoenixctms.ctsms.vo.InventoryBookingOutVO; import org.phoenixctms.ctsms.vo.MedicationInVO; import org.phoenixctms.ctsms.vo.MedicationOutVO; import org.phoenixctms.ctsms.vo.MoneyTransferInVO; import org.phoenixctms.ctsms.vo.MoneyTransferOutVO; import org.phoenixctms.ctsms.vo.PSFVO; import org.phoenixctms.ctsms.vo.ProbandAddressInVO; import org.phoenixctms.ctsms.vo.ProbandAddressOutVO; import org.phoenixctms.ctsms.vo.ProbandContactDetailValueInVO; import org.phoenixctms.ctsms.vo.ProbandContactDetailValueOutVO; import org.phoenixctms.ctsms.vo.ProbandGroupOutVO; import org.phoenixctms.ctsms.vo.ProbandImageInVO; import org.phoenixctms.ctsms.vo.ProbandImageOutVO; import org.phoenixctms.ctsms.vo.ProbandInVO; import org.phoenixctms.ctsms.vo.ProbandLetterPDFVO; import org.phoenixctms.ctsms.vo.ProbandOutVO; import org.phoenixctms.ctsms.vo.ProbandStatusEntryInVO; import org.phoenixctms.ctsms.vo.ProbandStatusEntryOutVO; import org.phoenixctms.ctsms.vo.ProbandTagValueInVO; import org.phoenixctms.ctsms.vo.ProbandTagValueOutVO; import org.phoenixctms.ctsms.vo.ProcedureInVO; import org.phoenixctms.ctsms.vo.ProcedureOutVO; import org.phoenixctms.ctsms.vo.ReimbursementsExcelVO; import org.phoenixctms.ctsms.vo.TrialOutVO; import org.phoenixctms.ctsms.vo.VisitScheduleExcelVO; import org.phoenixctms.ctsms.vo.VisitScheduleItemOutVO; import org.phoenixctms.ctsms.vocycle.ProbandReflexionGraph; /** * @see org.phoenixctms.ctsms.service.proband.ProbandService */ public class ProbandServiceImpl extends ProbandServiceBase { private static JournalEntry logSystemMessage(Proband proband, ProbandAddressOutVO addressVO, Timestamp now, User modified, 
String systemMessageCode, Object result, Object original, JournalEntryDao journalEntryDao) throws Exception { boolean journalEncrypted = CommonUtil.getUseJournalEncryption(JournalModule.PROBAND_JOURNAL, null); return journalEntryDao.addSystemMessage(proband, now, modified, systemMessageCode, journalEncrypted ? new Object[] { addressVO.getName() } : null, new Object[] { CoreUtil.getSystemMessageCommentContent(result, original, !journalEncrypted) }); } private static JournalEntry logSystemMessage(Trial trial, ProbandOutVO probandVO, Timestamp now, User modified, String systemMessageCode, Object result, Object original, JournalEntryDao journalEntryDao) throws Exception { boolean journalEncrypted = CommonUtil.getUseJournalEncryption(JournalModule.PROBAND_JOURNAL, null); return journalEntryDao.addSystemMessage(trial, now, modified, systemMessageCode, journalEncrypted ? new Object[] { CommonUtil.probandOutVOToString(probandVO) } : new Object[] { Long.toString(probandVO.getId()) }, new Object[] { CoreUtil.getSystemMessageCommentContent(result, original, !journalEncrypted) }); } private void addUpdateInquiryValue(InquiryValueInVO inquiryValueIn, Proband proband, Inquiry inquiry, Timestamp now, User user, boolean force, boolean logTrial, boolean logProband, ArrayList<InquiryValueOutVO> outInquiryValues, ArrayList<InquiryValueJsonVO> outJsInquiryValues) throws Exception { InquiryValueDao inquiryValueDao = this.getInquiryValueDao(); Long id = inquiryValueIn.getId(); InquiryValueOutVO result = null; InquiryValueJsonVO resultJs = null; JournalEntryDao journalEntryDao = this.getJournalEntryDao(); if (id == null) { if (inquiry.isDisabled()) { inquiryValueIn = ServiceUtil.createPresetInquiryInValue(inquiry, proband.getId(), this.getInputFieldSelectionSetValueDao()); } checkInquiryValueInput(inquiryValueIn, proband, inquiry); ServiceUtil.addAutocompleteSelectionSetValue(inquiry.getField(), inquiryValueIn.getTextValue(), now, user, this.getInputFieldSelectionSetValueDao(), 
journalEntryDao); InquiryValue inquiryValue = inquiryValueDao.inquiryValueInVOToEntity(inquiryValueIn); CoreUtil.modifyVersion(inquiryValue, now, user); InputFieldValue inputFieldValue = inquiryValue.getValue(); this.getInputFieldValueDao().create(inputFieldValue); inquiryValue = inquiryValueDao.create(inquiryValue); if (outInquiryValues != null || logTrial || logProband) { result = inquiryValueDao.toInquiryValueOutVO(inquiryValue); } if (outJsInquiryValues != null && !CommonUtil.isEmptyString(inquiry.getJsVariableName())) { resultJs = inquiryValueDao.toInquiryValueJsonVO(inquiryValue); } if (logProband) { ServiceUtil.logSystemMessage(proband, result.getInquiry().getTrial(), now, user, SystemMessageCodes.INQUIRY_VALUE_CREATED, result, null, journalEntryDao); } if (logTrial) { ServiceUtil.logSystemMessage(inquiry.getTrial(), result.getProband(), now, user, SystemMessageCodes.INQUIRY_VALUE_CREATED, result, null, journalEntryDao); } } else { InquiryValue originalInquiryValue = CheckIDUtil.checkInquiryValueId(id, inquiryValueDao); if (!inquiry.isDisabled() && !ServiceUtil.inquiryValueEquals(inquiryValueIn, originalInquiryValue.getValue(), force)) { checkInquiryValueInput(inquiryValueIn, proband, inquiry); ServiceUtil.addAutocompleteSelectionSetValue(inquiry.getField(), inquiryValueIn.getTextValue(), now, user, this.getInputFieldSelectionSetValueDao(), journalEntryDao); InquiryValueOutVO original = null; if (logProband || logTrial) { original = inquiryValueDao.toInquiryValueOutVO(originalInquiryValue); } inquiryValueDao.evict(originalInquiryValue); InquiryValue inquiryValue = inquiryValueDao.inquiryValueInVOToEntity(inquiryValueIn); CoreUtil.modifyVersion(originalInquiryValue, inquiryValue, now, user); inquiryValueDao.update(inquiryValue); if (outInquiryValues != null || logTrial || logProband) { result = inquiryValueDao.toInquiryValueOutVO(inquiryValue); } if (outJsInquiryValues != null && !CommonUtil.isEmptyString(inquiry.getJsVariableName())) { resultJs = 
inquiryValueDao.toInquiryValueJsonVO(inquiryValue); } if (logProband) { ServiceUtil.logSystemMessage(proband, result.getInquiry().getTrial(), now, user, SystemMessageCodes.INQUIRY_VALUE_UPDATED, result, original, journalEntryDao); } if (logTrial) { ServiceUtil .logSystemMessage(inquiry.getTrial(), result.getProband(), now, user, SystemMessageCodes.INQUIRY_VALUE_UPDATED, result, original, journalEntryDao); } } else { if (outInquiryValues != null) { result = inquiryValueDao.toInquiryValueOutVO(originalInquiryValue); } if (outJsInquiryValues != null && !CommonUtil.isEmptyString(inquiry.getJsVariableName())) { resultJs = inquiryValueDao.toInquiryValueJsonVO(originalInquiryValue); } } } if (outInquiryValues != null) { outInquiryValues.add(result); } if (resultJs != null) { outJsInquiryValues.add(resultJs); } } private void checkBankAccountInput(BankAccountInVO bankAccountIn) throws ServiceException { ProbandDao probandDao = this.getProbandDao(); // referential checks Proband proband = CheckIDUtil.checkProbandId(bankAccountIn.getProbandId(), probandDao); if (!probandDao.toProbandOutVO(proband).isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } if (!proband.isPerson()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_ACCOUNT_PROBAND_NOT_PERSON); } ServiceUtil.checkProbandLocked(proband); String iban = bankAccountIn.getIban(); String bic = bankAccountIn.getBic(); String accountNumber = bankAccountIn.getAccountNumber(); String bankCodeNumber = bankAccountIn.getBankCodeNumber(); if (bankAccountIn.getNa()) { if (bankAccountIn.getAccountHolderName() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.ACCOUNT_HOLDER_NAME_NOT_NULL); } if (bankAccountIn.getBankName() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_NAME_NOT_NULL); } if (iban != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.IBAN_NOT_NULL); } if (bic != null) { throw 
L10nUtil.initServiceException(ServiceExceptionCodes.BIC_NOT_NULL); } if (accountNumber != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.ACCOUNT_NUMBER_NOT_NULL); } if (bankCodeNumber != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_CODE_NUMBER_NOT_NULL); } } else { if (CommonUtil.isEmptyString(bankAccountIn.getAccountHolderName())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.ACCOUNT_HOLDER_NAME_REQUIRED); } if (CommonUtil.isEmptyString(iban) != CommonUtil.isEmptyString(bic)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_ACCOUNT_IBAN_AND_BIC_REQUIRED); } if (!CommonUtil.isEmptyString(iban) && !CommonUtil.checkIban(iban)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.INVALID_IBAN); } if (!CommonUtil.isEmptyString(bic) && !CommonUtil.checkBic(bic)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.INVALID_BIC); } if (CommonUtil.isEmptyString(accountNumber) != CommonUtil.isEmptyString(bankCodeNumber)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_ACCOUNT_ACCOUNT_NUMBER_AND_BANK_CODE_NUMBER_REQUIRED); } if (CommonUtil.isEmptyString(iban) && CommonUtil.isEmptyString(accountNumber)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.IBAN_OR_BANK_ACCOUNT_ACCOUNT_NUMBER_REQUIRED); } } } private void checkDiagnosisInput(DiagnosisInVO diagnosisIn) throws ServiceException { ProbandDao probandDao = this.getProbandDao(); // referential checks Proband proband = CheckIDUtil.checkProbandId(diagnosisIn.getProbandId(), probandDao); AlphaId alphaId = CheckIDUtil.checkAlphaIdId(diagnosisIn.getCodeId(), this.getAlphaIdDao()); if (!probandDao.toProbandOutVO(proband).isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } ServiceUtil.checkProbandLocked(proband); if (diagnosisIn.getStart() == null && diagnosisIn.getStop() != null) { throw 
L10nUtil.initServiceException(ServiceExceptionCodes.DIAGNOSIS_START_DATE_REQUIRED); } // other input checks if (diagnosisIn.getStart() != null && diagnosisIn.getStop() != null && diagnosisIn.getStop().compareTo(diagnosisIn.getStart()) <= 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.DIAGNOSIS_END_DATE_LESS_THAN_OR_EQUAL_TO_START_DATE); } if ((new DiagnosisCollisionFinder(probandDao, this.getDiagnosisDao())).collides(diagnosisIn)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.DIAGNOSIS_OVERLAPPING); } } private void checkInquiryValueInput(InquiryValueInVO inquiryValueIn, Proband proband, Inquiry inquiry) throws ServiceException { InputFieldDao inputFieldDao = this.getInputFieldDao(); InputField inputField = inquiry.getField(); inputFieldDao.lock(inputField, LockMode.PESSIMISTIC_WRITE); ServiceUtil.checkInputFieldTextValue(inputField, inquiry.isOptional(), inquiryValueIn.getTextValue(), inputFieldDao, this.getInputFieldSelectionSetValueDao()); ServiceUtil.checkInputFieldBooleanValue(inputField, inquiry.isOptional(), inquiryValueIn.getBooleanValue(), inputFieldDao); ServiceUtil.checkInputFieldLongValue(inputField, inquiry.isOptional(), inquiryValueIn.getLongValue(), inputFieldDao); ServiceUtil.checkInputFieldFloatValue(inputField, inquiry.isOptional(), inquiryValueIn.getFloatValue(), inputFieldDao); ServiceUtil.checkInputFieldDateValue(inputField, inquiry.isOptional(), inquiryValueIn.getDateValue(), inputFieldDao); ServiceUtil.checkInputFieldTimeValue(inputField, inquiry.isOptional(), inquiryValueIn.getTimeValue(), inputFieldDao); ServiceUtil.checkInputFieldTimestampValue(inputField, inquiry.isOptional(), inquiryValueIn.getTimestampValue(), inputFieldDao); ServiceUtil.checkInputFieldInkValue(inputField, inquiry.isOptional(), inquiryValueIn.getInkValues(), inputFieldDao); ServiceUtil.checkInputFieldSelectionSetValues(inputField, inquiry.isOptional(), inquiryValueIn.getSelectionValueIds(), inputFieldDao, 
this.getInputFieldSelectionSetValueDao()); if ((new InquiryValueCollisionFinder(this.getProbandDao(), this.getInquiryValueDao())).collides(inquiryValueIn)) { throw L10nUtil .initServiceException(ServiceExceptionCodes.INQUIRY_VALUE_ALREADY_EXISTS, CommonUtil.inputFieldOutVOToString(inputFieldDao.toInputFieldOutVO(inputField))); } } private void checkMedicationInput(MedicationInVO medicationIn) throws ServiceException { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(medicationIn.getProbandId(), probandDao, LockMode.PESSIMISTIC_WRITE); if (!probandDao.toProbandOutVO(proband).isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } ServiceUtil.checkProbandLocked(proband); AspDao aspDao = this.getAspDao(); Asp asp = null; if (medicationIn.getAspId() != null) { asp = CheckIDUtil.checkAspId(medicationIn.getAspId(), aspDao); } AspSubstanceDao aspSubstanceDao = this.getAspSubstanceDao(); Collection<Long> substanceIds = medicationIn.getSubstanceIds(); if (substanceIds != null && substanceIds.size() > 0) { Iterator<Long> it = substanceIds.iterator(); HashSet<Long> dupeCheck = new HashSet<Long>(substanceIds.size()); HashSet<Long> aspSubstanceIds; Collection<AspSubstance> aspSubstances; if (asp != null && ((aspSubstances = asp.getSubstances()) != null) && aspSubstances.size() > 0) { aspSubstanceIds = new HashSet<Long>(aspSubstances.size()); Iterator<AspSubstance> aspSubstancesIt = aspSubstances.iterator(); while (aspSubstancesIt.hasNext()) { aspSubstanceIds.add(aspSubstancesIt.next().getId()); } } else { aspSubstanceIds = new HashSet<Long>(); } while (it.hasNext()) { Long id = it.next(); if (id == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_SUBSTANCE_ID_IS_NULL); } AspSubstance substance = CheckIDUtil.checkAspSubstanceId(id, aspSubstanceDao); if (!dupeCheck.add(substance.getId())) { throw 
L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_DUPLICATE_SUBSTANCE, aspSubstanceDao.toAspSubstanceVO(substance).getName()); } if (asp != null && !aspSubstanceIds.remove(id)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_SUBSTANCE_NOT_CONTAINED, aspDao.toAspVO(asp).getName(), aspSubstanceDao.toAspSubstanceVO(substance).getName()); } } if (asp != null && aspSubstanceIds.size() > 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_SUBSTANCE_MISSING, aspDao.toAspVO(asp).getName(), ServiceUtil.aspSubstanceIDsToString(aspSubstanceIds, this.getAspSubstanceDao())); } } else { if (asp == null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_SUBSTANCES_REQUIRED); } } Diagnosis diagnosis = null; if (medicationIn.getDiagnosisId() != null) { diagnosis = CheckIDUtil.checkDiagnosisId(medicationIn.getDiagnosisId(), this.getDiagnosisDao()); if (!proband.equals(diagnosis.getProband())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_WRONG_DIAGNOSIS, proband.getId().toString()); } } Procedure procedure = null; if (medicationIn.getProcedureId() != null) { procedure = CheckIDUtil.checkProcedureId(medicationIn.getProcedureId(), this.getProcedureDao()); if (!proband.equals(procedure.getProband())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_WRONG_PROCEDURE, proband.getId().toString()); } } if (medicationIn.getDoseValue() != null) { if (medicationIn.getDoseValue() <= 0.0f) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_DOSE_VALUE_LESS_THAN_OR_EQUAL_ZERO); } if (CommonUtil.isEmptyString(medicationIn.getDoseUnit())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_DOSE_UNIT_REQUIRED); } } else { if (medicationIn.getDoseUnit() != null) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_DOSE_UNIT_NOT_NULL); } } if (medicationIn.getStart() == null && medicationIn.getStop() != null) { throw 
L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_START_DATE_REQUIRED); } // other input checks if (medicationIn.getStart() != null && medicationIn.getStop() != null && medicationIn.getStop().compareTo(medicationIn.getStart()) <= 0) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_END_DATE_LESS_THAN_OR_EQUAL_TO_START_DATE); } if ((new MedicationCollisionFinder(probandDao, this.getMedicationDao())).collides(medicationIn)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_OVERLAPPING); } } private void checkMoneyTransferInput(MoneyTransferInVO moneyTransferIn, Long maxAllowedCostTypes) throws ServiceException { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(moneyTransferIn.getProbandId(), probandDao, LockMode.PESSIMISTIC_WRITE); if (!probandDao.toProbandOutVO(proband).isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } if (!proband.isPerson()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PROBAND_NOT_PERSON); } ServiceUtil.checkProbandLocked(proband); if (moneyTransferIn.getTrialId() != null) { Trial trial = CheckIDUtil.checkTrialId(moneyTransferIn.getTrialId(), this.getTrialDao()); ServiceUtil.checkTrialLocked(trial); (new MaxCostTypesAdapter(maxAllowedCostTypes, this.getTrialDao(), this.getMoneyTransferDao())).checkCategoryInput(moneyTransferIn); } BankAccount bankAccount = null; if (moneyTransferIn.getBankAccountId() != null) { bankAccount = CheckIDUtil.checkBankAccountId(moneyTransferIn.getBankAccountId(), this.getBankAccountDao()); if (!proband.equals(bankAccount.getProband())) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_WRONG_BANK_ACCOUNT, proband.getId().toString()); } } if (PaymentMethod.WIRE_TRANSFER.equals(moneyTransferIn.getMethod())) { if (bankAccount == null) { throw 
L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_BANK_ACCOUNT_REQUIRED);
            }
        } else {
            // non-bank-account payment methods must not carry bank-account-specific fields
            if (bankAccount != null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_BANK_ACCOUNT_NOT_NULL);
            }
            if (moneyTransferIn.getReasonForPayment() != null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_REASON_FORM_PAYMENT_NOT_NULL);
            }
            if (moneyTransferIn.getReference() != null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_REFERENCE_NOT_NULL);
            }
        }
        // voucher payments must not have a negative amount; non-voucher payments must not carry a voucher code
        if (PaymentMethod.VOUCHER.equals(moneyTransferIn.getMethod())) {
            if (moneyTransferIn.getAmount() < 0.0f) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_AMOUNT_NEGATIVE);
            }
        } else {
            if (moneyTransferIn.getVoucherCode() != null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_VOUCHER_CODE_NOT_NULL);
            }
        }
        // a comment is mandatory when the "show comment" flag is set
        if (moneyTransferIn.getShowComment() && CommonUtil.isEmptyString(moneyTransferIn.getComment())) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_COMMENT_REQUIRED);
        }
    }

    /**
     * Validates that the proband described by probandIn may be linked as a parent of the given child.
     * If the proband is already among the child's parents, the link is accepted as-is; otherwise a child
     * may have at most two parents, and no two parents may share the same gender.
     */
    private void checkParents(ProbandInVO probandIn, Proband child) throws ServiceException {
        Iterator<Proband> parentsIt = child.getParents().iterator();
        int parentCount = 0;
        HashSet<Sex> parentGenders = new HashSet<Sex>(Sex.literals().size());
        boolean isParent = false;
        while (parentsIt.hasNext()) {
            Proband parent = parentsIt.next();
            if (parent.getId().equals(probandIn.getId())) {
                // already linked as a parent - nothing further to validate
                isParent = true;
                break;
            }
            // collect the genders of the existing parents (person and animal particulars are stored separately)
            if (parent.isPerson()) {
                ProbandContactParticulars personParticlars = parent.getPersonParticulars();
                if (personParticlars != null && personParticlars.getGender() != null) {
                    parentGenders.add(personParticlars.getGender());
                }
            } else {
                AnimalContactParticulars animalParticlars = parent.getAnimalParticulars();
                if (animalParticlars != null && animalParticlars.getGender() != null) {
                    parentGenders.add(animalParticlars.getGender());
                }
            }
            parentCount++;
        }
        if (!isParent) {
            // NOTE(review): probandDao is fetched but not used below
            ProbandDao probandDao = this.getProbandDao();
            if (parentCount >= 2) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CHILD_TWO_PARENTS, child.getId().toString());
            }
            // Set.add returns false if that gender is already present among the parents
            if (probandIn.getGender() != null && !parentGenders.add(probandIn.getGender())) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CHILD_PARENT_WITH_SAME_SEX, child.getId().toString(),
                        L10nUtil.getSexName(Locales.USER, probandIn.getGender().name()));
            }
        }
    }

    // delegates address-type tag validation to the adapter
    private void checkProbandAddressInput(ProbandAddressInVO addressIn) throws ServiceException {
        (new ProbandAddressTypeTagAdapter(this.getProbandDao(), this.getAddressTypeDao())).checkTagValueInput(addressIn);
    }

    // delegates contact-detail-type tag validation to the adapter
    private void checkProbandContactDetailValueInput(ProbandContactDetailValueInVO contactValueIn) throws ServiceException {
        (new ProbandContactDetailTypeTagAdapter(this.getProbandDao(), this.getContactDetailTypeDao())).checkTagValueInput(contactValueIn);
    }

    /**
     * Validates an uploaded proband image (only when image data is present): size limit,
     * a known image mime type for the PROBAND_IMAGE module, readable dimensions, and
     * configured minimum width/height.
     */
    private void checkProbandImageInput(ProbandImageInVO probandImage) throws ServiceException {
        if (probandImage.getDatas() != null && probandImage.getDatas().length > 0) {
            Integer probandImageSizeLimit = Settings.getIntNullable(SettingCodes.PROBAND_IMAGE_SIZE_LIMIT, Bundle.SETTINGS, DefaultSettings.PROBAND_IMAGE_SIZE_LIMIT);
            if (probandImageSizeLimit != null && probandImage.getDatas().length > probandImageSizeLimit) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_SIZE_LIMIT_EXCEEDED, CommonUtil.humanReadableByteCount(probandImageSizeLimit));
            }
            if (probandImage.getMimeType() == null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_MIME_TYPE_REQUIRED);
            }
            // the mime type must be registered for the proband image module and flagged as an image
            Iterator<MimeType> it = this.getMimeTypeDao().findByMimeTypeModule(probandImage.getMimeType(), FileModule.PROBAND_IMAGE).iterator();
            if (!it.hasNext()) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_MIME_TYPE_UNKNOWN, probandImage.getMimeType());
            }
            if (!it.next().isImage()) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_MIME_TYPE_NO_IMAGE, probandImage.getMimeType());
            }
            Dimension imageDimension = CoreUtil.getImageDimension(probandImage.getDatas());
            if (imageDimension == null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_CANNOT_READ_DIMENSIONS);
            } else {
                Integer probandImageMinWidth = Settings.getIntNullable(SettingCodes.PROBAND_IMAGE_MIN_WIDTH, Bundle.SETTINGS, DefaultSettings.PROBAND_IMAGE_MIN_WIDTH);
                Integer probandImageMinHeight = Settings.getIntNullable(SettingCodes.PROBAND_IMAGE_MIN_HEIGHT, Bundle.SETTINGS, DefaultSettings.PROBAND_IMAGE_MIN_HEIGHT);
                if (probandImageMinWidth != null && imageDimension.getWidth() < (double) probandImageMinWidth) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_WIDTH_LESS_THAN_LIMIT, probandImageMinWidth);
                }
                if (probandImageMinHeight != null && imageDimension.getHeight() < (double) probandImageMinHeight) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_IMAGE_HEIGHT_LESS_THAN_LIMIT, probandImageMinHeight);
                }
            }
        }
    }

    /**
     * Validates a proband input VO: referenced department/category/physician/children must exist,
     * children must be unique and satisfy the parent constraints, and the field combination must
     * match the person/animal and blinded/non-blinded variants. Also validates the rating pair.
     */
    private void checkProbandInput(ProbandInVO probandIn) throws ServiceException {
        // referential checks
        CheckIDUtil.checkDepartmentId(probandIn.getDepartmentId(), this.getDepartmentDao());
        ProbandCategory category = CheckIDUtil.checkProbandCategoryId(probandIn.getCategoryId(), this.getProbandCategoryDao());
        if (probandIn.getPhysicianId() != null) {
            CheckIDUtil.checkStaffId(probandIn.getPhysicianId(), this.getStaffDao());
        }
        if (probandIn.getChildIds() != null && probandIn.getChildIds().size() > 0) {
            ProbandDao probandDao = this.getProbandDao();
            // sorted to acquire the pessimistic child locks in a deterministic order
            ArrayList<Long> childIds = new ArrayList<Long>(probandIn.getChildIds());
            Collections.sort(childIds);
            Iterator<Long> it = childIds.iterator();
            HashSet<Long> dupeCheck = new HashSet<Long>(childIds.size());
            while (it.hasNext()) {
                Long id = it.next();
                if (id == null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CHILD_NULL);
                }
                Proband child = CheckIDUtil.checkProbandId(id, probandDao, LockMode.PESSIMISTIC_WRITE);
                if (!dupeCheck.add(child.getId())) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.DUPLICATE_PROBAND_CHILD, child.getId().toString());
                }
                checkParents(probandIn, child);
            }
        }
        // other input checks
        if (probandIn.isPerson()) {
            if (!category.isPerson()) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CATEGORY_NOT_FOR_PERSON_ENTRIES, L10nUtil.getProbandCategoryName(Locales.USER, category.getNameL10nKey()));
            }
            if (!probandIn.isBlinded()) {
                // non-blinded person: identifying fields are mandatory, alias is forbidden
                if (CommonUtil.isEmptyString(probandIn.getFirstName())) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_FIRST_NAME_REQUIRED);
                }
                if (CommonUtil.isEmptyString(probandIn.getLastName())) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_LAST_NAME_REQUIRED);
                }
                if (probandIn.getDateOfBirth() == null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_REQUIRED);
                } else if (DateCalc.getStartOfDay(probandIn.getDateOfBirth()).compareTo(new Date()) > 0) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_IN_THE_FUTURE);
                }
                if (probandIn.getGender() == null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_GENDER_REQUIRED);
                }
                if (probandIn.getAlias() != null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_ALIAS_NOT_NULL);
                }
            } else {
                // blinded person: identifying fields must NOT be set
                if (probandIn.getPrefixedTitle1() != null || probandIn.getPrefixedTitle2() != null || probandIn.getPrefixedTitle3() != null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_PREFIXED_TITLES_NOT_NULL);
                }
                if (probandIn.getFirstName() != null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_FIRST_NAME_NOT_NULL);
                }
                if (probandIn.getLastName() != null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_LAST_NAME_NOT_NULL);
                }
                if (probandIn.getDateOfBirth() != null && DateCalc.getStartOfDay(probandIn.getDateOfBirth()).compareTo(new Date()) > 0) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_IN_THE_FUTURE);
                }
                if (probandIn.getPostpositionedTitle1() != null || probandIn.getPostpositionedTitle2() != null || probandIn.getPostpositionedTitle3() != null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_POSTPOSITIONED_TITLES_NOT_NULL);
                }
                if (probandIn.getCitizenship() != null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CITIZENSHIP_NOT_NULL);
                }
            }
            if (probandIn.getAnimalName() != null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.ANIMAL_NAME_NOT_NULL);
            }
        } else {
            if (!category.isAnimal()) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CATEGORY_NOT_FOR_ANIMAL_ENTRIES, L10nUtil.getProbandCategoryName(Locales.USER, category.getNameL10nKey()));
            }
            if (!probandIn.isBlinded()) {
                // non-blinded animal: animal name, date of birth and gender are mandatory
                if (CommonUtil.isEmptyString(probandIn.getAnimalName())) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.ANIMAL_NAME_REQUIRED);
                }
                if (probandIn.getDateOfBirth() == null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_REQUIRED);
                } else if (DateCalc.getStartOfDay(probandIn.getDateOfBirth()).compareTo(new Date()) > 0) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_IN_THE_FUTURE);
                }
                if (probandIn.getGender() == null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_GENDER_REQUIRED);
                }
                if (probandIn.getAlias() != null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_ALIAS_NOT_NULL);
                }
            } else {
                if (probandIn.getAnimalName() != null) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.ANIMAL_NAME_NOT_NULL);
                }
                if (probandIn.getDateOfBirth() != null && DateCalc.getStartOfDay(probandIn.getDateOfBirth()).compareTo(new Date()) > 0) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DATE_OF_BIRTH_IN_THE_FUTURE);
                }
            }
            // person-only fields are always forbidden for animal entries
            if (probandIn.getPrefixedTitle1() != null || probandIn.getPrefixedTitle2() != null || probandIn.getPrefixedTitle3() != null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_PREFIXED_TITLES_NOT_NULL);
            }
            if (probandIn.getFirstName() != null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_FIRST_NAME_NOT_NULL);
            }
            if (probandIn.getLastName() != null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_LAST_NAME_NOT_NULL);
            }
            if (probandIn.getPostpositionedTitle1() != null || probandIn.getPostpositionedTitle2() != null || probandIn.getPostpositionedTitle3() != null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_POSTPOSITIONED_TITLES_NOT_NULL);
            }
            if (probandIn.getCitizenship() != null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CITIZENSHIP_NOT_NULL);
            }
        }
        // rating and ratingMax must be set (or unset) together, with 0 <= rating <= ratingMax and ratingMax > 0
        if (probandIn.getRatingMax() != null) {
            if (probandIn.getRatingMax() <= 0l) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_RATING_MAX_LESS_THAN_OR_EQUAL_ZERO);
            } else if (probandIn.getRating() == null) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_RATING_REQUIRED);
            } else {
                if (probandIn.getRating() < 0l) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_RATING_LESS_THAN_ZERO);
                } else if (probandIn.getRating() > probandIn.getRatingMax()) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_RATING_GREATER_THAN_RATING_MAX);
                }
            }
        } else if (probandIn.getRating() != null) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_RATING_NOT_NULL);
        }
    }

    // guards against cycles in the proband parent/child graph
    private void checkProbandLoop(Proband proband) throws ServiceException {
        (new ProbandReflexionGraph(this.getProbandDao())).checkGraphLoop(proband, false, true);
    }

    /**
     * Validates a proband status entry: proband and status type must exist, the proband must be
     * decryptable and not locked, the status type must match person/animal, the interval must be
     * well-formed and must not collide with existing entries.
     */
    private void checkProbandStatusEntryInput(ProbandStatusEntryInVO statusEntryIn) throws ServiceException {
        ProbandDao probandDao = this.getProbandDao();
        // referential checks
        Proband proband = CheckIDUtil.checkProbandId(statusEntryIn.getProbandId(), probandDao);
        ProbandStatusType statusType =
CheckIDUtil.checkProbandStatusTypeId(statusEntryIn.getTypeId(), this.getProbandStatusTypeDao());
        if (!probandDao.toProbandOutVO(proband).isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
        }
        ServiceUtil.checkProbandLocked(proband);
        // status type must match the proband kind (person vs. animal)
        if (proband.isPerson() && !statusType.isPerson()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_STATUS_NOT_FOR_PERSON_ENTRIES, L10nUtil.getProbandStatusTypeName(Locales.USER, statusType.getNameL10nKey()));
        }
        if (!proband.isPerson() && !statusType.isAnimal()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_STATUS_NOT_FOR_ANIMAL_ENTRIES, L10nUtil.getProbandStatusTypeName(Locales.USER, statusType.getNameL10nKey()));
        }
        // other input checks
        if (statusEntryIn.getStop() != null && statusEntryIn.getStop().compareTo(statusEntryIn.getStart()) <= 0) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_STATUS_ENTRY_END_DATE_LESS_THAN_OR_EQUAL_TO_START_DATE);
        }
        if ((new ProbandStatusEntryCollisionFinder(probandDao, this.getProbandStatusEntryDao())).collides(statusEntryIn)) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_STATUS_ENTRY_OVERLAPPING);
        }
    }

    // delegates proband tag validation to the adapter
    private void checkProbandTagValueInput(ProbandTagValueInVO tagValueIn) throws ServiceException {
        (new ProbandTagAdapter(this.getProbandDao(), this.getProbandTagDao())).checkTagValueInput(tagValueIn);
    }

    /**
     * Validates a procedure entry: proband and OPS code must exist, the proband must be decryptable
     * and not locked, the interval must be well-formed (stop requires start, stop after start) and
     * must not collide with existing procedures.
     */
    private void checkProcedureInput(ProcedureInVO procedureIn) throws ServiceException {
        ProbandDao probandDao = this.getProbandDao();
        // referential checks
        Proband proband = CheckIDUtil.checkProbandId(procedureIn.getProbandId(), probandDao);
        OpsCode opsCode = CheckIDUtil.checkOpsCodeId(procedureIn.getCodeId(), this.getOpsCodeDao());
        if (!probandDao.toProbandOutVO(proband).isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
        }
        ServiceUtil.checkProbandLocked(proband);
        if (procedureIn.getStart() == null && procedureIn.getStop() != null) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.PROCEDURE_START_DATE_REQUIRED);
        }
        // other input checks
        if (procedureIn.getStart() != null && procedureIn.getStop() != null && procedureIn.getStop().compareTo(procedureIn.getStart()) <= 0) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.PROCEDURE_END_DATE_LESS_THAN_OR_EQUAL_TO_START_DATE);
        }
        if ((new ProcedureCollisionFinder(probandDao, this.getProcedureDao())).collides(procedureIn)) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.PROCEDURE_OVERLAPPING);
        }
    }

    /**
     * Assembles the inquiry values for a proband/trial: page values always, plus the JSON values
     * (either all active JS values of the trial, or only those of the queried page) when requested.
     */
    private InquiryValuesOutVO getInquiryValues(Trial trial, String category, ProbandOutVO probandVO, Boolean active, Boolean activeSignup, boolean jsValues, boolean loadAllJsValues, boolean sort, PSFVO psf) throws Exception {
        InquiryValueDao inquiryValueDao = this.getInquiryValueDao();
        InquiryValuesOutVO result = new InquiryValuesOutVO();
        Collection<Map> inquiryValues = inquiryValueDao.findByProbandTrialCategoryActiveJs(probandVO.getId(), trial.getId(), category, active, activeSignup, sort, null, psf);
        result.setPageValues(ServiceUtil.getInquiryValues(probandVO, inquiryValues, null, this.getInquiryDao(), inquiryValueDao));
        if (jsValues) {
            if (loadAllJsValues) {
                // fetch JS values across the whole trial, not just the queried page
                result.setJsValues(ServiceUtil.getInquiryJsonValues(
                        inquiryValueDao.findByProbandTrialActiveJs(probandVO.getId(), trial.getId(), active, activeSignup, sort, true, null), false,
                        inquiryValueDao, this.getInputFieldSelectionSetValueDao()));
            } else {
                result.setJsValues(ServiceUtil.getInquiryJsonValues(inquiryValues, true, inquiryValueDao, this.getInputFieldSelectionSetValueDao()));
            }
        }
        return result;
    }

    /**
     * Creates a bank account for a proband after input validation, stamps version/modifier and
     * writes a journal entry on the owning proband.
     */
    @Override
    protected BankAccountOutVO handleAddBankAccount(
            AuthenticationVO auth, BankAccountInVO newBankAccount) throws Exception {
        checkBankAccountInput(newBankAccount);
        BankAccountDao bankAccountDao = this.getBankAccountDao();
        BankAccount bankAccount = bankAccountDao.bankAccountInVOToEntity(newBankAccount);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user =
CoreUtil.getUser();
        CoreUtil.modifyVersion(bankAccount, now, user);
        bankAccount = bankAccountDao.create(bankAccount);
        BankAccountOutVO result = bankAccountDao.toBankAccountOutVO(bankAccount);
        ServiceUtil.logSystemMessage(bankAccount.getProband(), result.getProband(), now, user, SystemMessageCodes.BANK_ACCOUNT_CREATED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Creates a diagnosis after input validation, stamps version/modifier and journals the creation
     * on the owning proband.
     */
    @Override
    protected DiagnosisOutVO handleAddDiagnosis(AuthenticationVO auth, DiagnosisInVO newDiagnosis) throws Exception {
        checkDiagnosisInput(newDiagnosis);
        DiagnosisDao diagnosisDao = this.getDiagnosisDao();
        Diagnosis diagnosis = diagnosisDao.diagnosisInVOToEntity(newDiagnosis);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        CoreUtil.modifyVersion(diagnosis, now, user);
        diagnosis = diagnosisDao.create(diagnosis);
        DiagnosisOutVO result = diagnosisDao.toDiagnosisOutVO(diagnosis);
        ServiceUtil.logSystemMessage(diagnosis.getProband(), result.getProband(), now, user, SystemMessageCodes.DIAGNOSIS_CREATED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Creates a medication after input validation, stamps version/modifier and journals the creation
     * on the owning proband.
     */
    @Override
    protected MedicationOutVO handleAddMedication(AuthenticationVO auth, MedicationInVO newMedication) throws Exception {
        checkMedicationInput(newMedication);
        MedicationDao medicationDao = this.getMedicationDao();
        Medication medication = medicationDao.medicationInVOToEntity(newMedication);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        CoreUtil.modifyVersion(medication, now, user);
        medication = medicationDao.create(medication);
        MedicationOutVO result = medicationDao.toMedicationOutVO(medication);
        ServiceUtil.logSystemMessage(medication.getProband(), result.getProband(), now, user, SystemMessageCodes.MEDICATION_CREATED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Creates a money transfer after input validation, stamps version/modifier and journals the
     * creation on the proband and, when the transfer is trial-bound, on the trial as well.
     */
    @Override
    protected MoneyTransferOutVO handleAddMoneyTransfer(
            AuthenticationVO auth, MoneyTransferInVO newMoneyTransfer, Long maxAllowedCostTypes) throws Exception {
        checkMoneyTransferInput(newMoneyTransfer, maxAllowedCostTypes);
        MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
        MoneyTransfer moneyTransfer = moneyTransferDao.moneyTransferInVOToEntity(newMoneyTransfer);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        CoreUtil.modifyVersion(moneyTransfer, now, user);
        moneyTransfer = moneyTransferDao.create(moneyTransfer);
        Trial trial = moneyTransfer.getTrial();
        MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
        if (trial != null) {
            logSystemMessage(trial, result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_CREATED, result, null, this.getJournalEntryDao());
        }
        ServiceUtil
                .logSystemMessage(moneyTransfer.getProband(), result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_CREATED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Creates a proband after input validation. The proband must belong to the creating user's
     * department. Journals the creation on the proband and on its physician (if any).
     */
    @Override
    protected ProbandOutVO handleAddProband(AuthenticationVO auth, ProbandInVO newProband, Integer maxInstances, Integer maxParentsDepth, Integer maxChildrenDepth) throws Exception {
        checkProbandInput(newProband);
        User user = CoreUtil.getUser();
        this.getUserDao().lock(user, LockMode.PESSIMISTIC_WRITE);
        if (!user.getDepartment().getId().equals(newProband.getDepartmentId())) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_NOT_EQUAL_TO_USER_DEPARTMENT);
        }
        Timestamp now = new Timestamp(System.currentTimeMillis());
        Proband proband = ServiceUtil.createProband(newProband, now, user, this.getProbandDao(), this.getPrivacyConsentStatusTypeDao(), this.getProbandContactParticularsDao(),
                this.getAnimalContactParticularsDao(), this.getNotificationDao());
        ProbandOutVO result = this.getProbandDao().toProbandOutVO(proband, maxInstances, maxParentsDepth, maxChildrenDepth);
        JournalEntryDao journalEntryDao = this.getJournalEntryDao();
        ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_CREATED, result, null, journalEntryDao);
        Staff physician = proband.getPhysician();
        if (physician != null) {
            ServiceUtil.logSystemMessage(physician, result, now, user, SystemMessageCodes.PROBAND_CREATED, result, null, journalEntryDao);
        }
        return result;
    }

    /**
     * Creates a proband address after input validation; the first address of a proband is
     * automatically flagged as the wire-transfer address.
     */
    @Override
    protected ProbandAddressOutVO handleAddProbandAddress(
            AuthenticationVO auth, ProbandAddressInVO newProbandAddress) throws Exception {
        checkProbandAddressInput(newProbandAddress);
        ProbandAddressDao addressDao = this.getProbandAddressDao();
        ProbandAddress address = addressDao.probandAddressInVOToEntity(newProbandAddress);
        if (addressDao.getCount(address.getProband().getId(), null, null, true) == 0) {
            // first address entered becomes the wire-transfer address
            address.setWireTransfer(true);
        }
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        CoreUtil.modifyVersion(address, now, user);
        address = addressDao.create(address);
        ProbandAddressOutVO result = addressDao.toProbandAddressOutVO(address);
        ServiceUtil.logSystemMessage(address.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_ADDRESS_CREATED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Creates a proband contact detail value after input validation, stamps version/modifier and
     * journals the creation on the owning proband.
     */
    @Override
    protected ProbandContactDetailValueOutVO handleAddProbandContactDetailValue(
            AuthenticationVO auth, ProbandContactDetailValueInVO newProbandContactDetailValue) throws Exception {
        checkProbandContactDetailValueInput(newProbandContactDetailValue);
        ProbandContactDetailValueDao contactValueDao = this.getProbandContactDetailValueDao();
        ProbandContactDetailValue contactValue = contactValueDao.probandContactDetailValueInVOToEntity(newProbandContactDetailValue);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        CoreUtil.modifyVersion(contactValue, now, user);
        contactValue = contactValueDao.create(contactValue);
        ProbandContactDetailValueOutVO result = contactValueDao.toProbandContactDetailValueOutVO(contactValue);
        ServiceUtil.logSystemMessage(contactValue.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_CONTACT_DETAIL_VALUE_CREATED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Creates a proband status entry after input validation, fires the proband-inactive
     * notification and journals the creation on the owning proband.
     */
    @Override
    protected ProbandStatusEntryOutVO handleAddProbandStatusEntry(
            AuthenticationVO auth, ProbandStatusEntryInVO newProbandStatusEntry) throws Exception {
        checkProbandStatusEntryInput(newProbandStatusEntry);
        ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao();
        ProbandStatusEntry statusEntry = statusEntryDao.probandStatusEntryInVOToEntity(newProbandStatusEntry);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        CoreUtil.modifyVersion(statusEntry, now, user);
        statusEntry = statusEntryDao.create(statusEntry);
        notifyProbandInactive(statusEntry, now);
        ProbandStatusEntryOutVO result = statusEntryDao.toProbandStatusEntryOutVO(statusEntry);
        ServiceUtil.logSystemMessage(statusEntry.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_STATUS_ENTRY_CREATED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Creates a proband tag value after input validation, stamps version/modifier and journals the
     * creation on the owning proband.
     */
    @Override
    protected ProbandTagValueOutVO handleAddProbandTagValue(
            AuthenticationVO auth, ProbandTagValueInVO newProbandTagValue) throws Exception {
        checkProbandTagValueInput(newProbandTagValue);
        ProbandTagValueDao tagValueDao = this.getProbandTagValueDao();
        ProbandTagValue tagValue = tagValueDao.probandTagValueInVOToEntity(newProbandTagValue);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        CoreUtil.modifyVersion(tagValue, now, user);
        tagValue = tagValueDao.create(tagValue);
        ProbandTagValueOutVO result = tagValueDao.toProbandTagValueOutVO(tagValue);
        ServiceUtil.logSystemMessage(tagValue.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_TAG_VALUE_CREATED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Creates a procedure after input validation, stamps version/modifier and journals the creation
     * on the owning proband.
     */
    @Override
    protected ProcedureOutVO handleAddProcedure(AuthenticationVO auth, ProcedureInVO newProcedure) throws Exception {
        checkProcedureInput(newProcedure);
        ProcedureDao procedureDao = this.getProcedureDao();
        Procedure procedure = procedureDao.procedureInVOToEntity(newProcedure);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        CoreUtil.modifyVersion(procedure, now, user);
        procedure = procedureDao.create(procedure);
        ProcedureOutVO result = procedureDao.toProcedureOutVO(procedure);
        ServiceUtil.logSystemMessage(procedure.getProband(), result.getProband(), now, user, SystemMessageCodes.PROCEDURE_CREATED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Auto-completion lookup for money-transfer cost types matching the given prefix, optionally
     * scoped by trial/proband and their departments (all scope IDs are validated if present).
     */
    @Override
    protected Collection<String> handleCompleteCostTypes(AuthenticationVO auth, Long trialDepartmentId, Long trialId, Long probandDepartmentId, Long probandId, String costTypePrefix, Integer limit) throws Exception {
        if (trialDepartmentId != null) {
            CheckIDUtil.checkDepartmentId(trialDepartmentId, this.getDepartmentDao());
        }
        if (probandDepartmentId != null) {
            CheckIDUtil.checkDepartmentId(probandDepartmentId, this.getDepartmentDao());
        }
        if (trialId != null) {
            CheckIDUtil.checkTrialId(trialId, this.getTrialDao());
        }
        if (probandId != null) {
            CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
        }
        return this.getMoneyTransferDao().findCostTypes(trialDepartmentId, trialId, probandDepartmentId, probandId, costTypePrefix, limit);
    }

    /**
     * Deletes a bank account together with all money transfers booked against it, journaling each
     * cascaded deletion on the proband and any affected trial. Requires a decryptable bank account
     * and an unlocked proband.
     */
    @Override
    protected BankAccountOutVO handleDeleteBankAccount(AuthenticationVO auth, Long bankAccountId) throws Exception {
        BankAccountDao bankAccountDao = this.getBankAccountDao();
        BankAccount bankAccount = CheckIDUtil.checkBankAccountId(bankAccountId, bankAccountDao);
        Proband proband = bankAccount.getProband();
        BankAccountOutVO result = bankAccountDao.toBankAccountOutVO(bankAccount);
        if (!result.isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_BANK_ACCOUNT);
        }
        ServiceUtil.checkProbandLocked(proband);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        JournalEntryDao journalEntryDao = this.getJournalEntryDao();
        // unlink the account from its proband before cascading to the money transfers
        proband.removeBankAccounts(bankAccount);
        bankAccount.setProband(null);
        MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
        // cascade: remove every money transfer booked against this account, journaling each removal
        Iterator<MoneyTransfer> moneyTransfersIt = bankAccount.getMoneyTransfers().iterator();
        while (moneyTransfersIt.hasNext()) {
            MoneyTransfer moneyTransfer = moneyTransfersIt.next();
            MoneyTransferOutVO moneyTransferVO = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
            Trial trial = moneyTransfer.getTrial();
            if (trial != null) {
                // trial-bound transfers may only be removed while the trial is unlocked
                ServiceUtil.checkTrialLocked(trial);
                logSystemMessage(trial, result.getProband(), now, user, SystemMessageCodes.BANK_ACCOUNT_DELETED_MONEY_TRANSFER_DELETED, moneyTransferVO, null, journalEntryDao);
                trial.removePayoffs(moneyTransfer);
            }
            moneyTransfer.setBankAccount(null);
            moneyTransferDao.remove(moneyTransfer);
            moneyTransfer.setProband(null);
            proband.removeMoneyTransfers(moneyTransfer);
            ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.BANK_ACCOUNT_DELETED_MONEY_TRANSFER_DELETED, moneyTransferVO, null, journalEntryDao);
        }
        bankAccount.getMoneyTransfers().clear();
        bankAccountDao.remove(bankAccount);
        ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.BANK_ACCOUNT_DELETED, result, null, journalEntryDao);
        return result;
    }

    /**
     * Deletes a diagnosis; referencing medications are kept but detached from the diagnosis (with a
     * version bump and a journal entry each). Requires a decryptable diagnosis and an unlocked proband.
     */
    @Override
    protected DiagnosisOutVO handleDeleteDiagnosis(AuthenticationVO auth, Long diagnosisId) throws Exception {
        DiagnosisDao diagnosisDao = this.getDiagnosisDao();
        Diagnosis diagnosis = CheckIDUtil.checkDiagnosisId(diagnosisId, diagnosisDao);
        Proband proband = diagnosis.getProband();
        DiagnosisOutVO result = diagnosisDao.toDiagnosisOutVO(diagnosis);
        if (!result.isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_DIAGNOSIS);
        }
        ServiceUtil.checkProbandLocked(proband);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        JournalEntryDao journalEntryDao = this.getJournalEntryDao();
        // unlink code and proband associations before removal
        AlphaId alphaId = diagnosis.getCode();
        alphaId.removeDiagnoses(diagnosis);
        diagnosis.setCode(null);
        proband.removeDiagnoses(diagnosis);
        diagnosis.setProband(null);
        MedicationDao medicationDao = this.getMedicationDao();
        // detach (not delete) medications referencing this diagnosis
        Iterator<Medication> medicationsIt = diagnosis.getMedications().iterator();
        while (medicationsIt.hasNext()) {
            Medication medication = medicationsIt.next();
            MedicationOutVO originalMedicationVO = medicationDao.toMedicationOutVO(medication);
            medication.setDiagnosis(null);
            CoreUtil.modifyVersion(medication, medication.getVersion(), now, user);
            medicationDao.update(medication);
            MedicationOutVO medicationVO = medicationDao.toMedicationOutVO(medication);
            ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.DIAGNOSIS_DELETED_MEDICATION_UPDATED, medicationVO, originalMedicationVO, journalEntryDao);
        }
        diagnosis.getMedications().clear();
        diagnosisDao.remove(diagnosis);
        ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.DIAGNOSIS_DELETED, result, null, journalEntryDao);
        return result;
    }

    /**
     * Deletes a medication after unlinking it from its proband and any referencing diagnosis or
     * procedure. Requires a decryptable medication.
     */
    @Override
    protected MedicationOutVO handleDeleteMedication(AuthenticationVO auth, Long medicationId) throws Exception {
        MedicationDao medicationDao = this.getMedicationDao();
        Medication medication = CheckIDUtil.checkMedicationId(medicationId, medicationDao);
        Proband proband = medication.getProband();
        MedicationOutVO result = medicationDao.toMedicationOutVO(medication);
        if (!result.isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MEDICATION);
        }
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        Diagnosis diagnosis = medication.getDiagnosis();
        Procedure procedure = medication.getProcedure();
        proband.removeMedications(medication);
        medication.setProband(null);
        if (diagnosis != null) {
            diagnosis.removeMedications(medication);
            medication.setDiagnosis(null);
        }
        if (procedure != null) {
            procedure.removeMedications(medication);
            medication.setProcedure(null);
        }
        medicationDao.remove(medication);
        ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.MEDICATION_DELETED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Deletes a money transfer after unlinking it from its proband, bank account and (unlocked)
     * trial, journaling the deletion on the proband and, when trial-bound, on the trial.
     */
    @Override
    protected MoneyTransferOutVO handleDeleteMoneyTransfer(AuthenticationVO auth, Long moneyTransferId) throws Exception {
        MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
        MoneyTransfer moneyTransfer = CheckIDUtil.checkMoneyTransferId(moneyTransferId, moneyTransferDao);
        Proband proband = moneyTransfer.getProband();
        MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
        if (!result.isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MONEY_TRANSFER);
        }
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        BankAccount bankAccount = moneyTransfer.getBankAccount();
        Trial trial = moneyTransfer.getTrial();
        if (trial != null) {
            ServiceUtil.checkTrialLocked(trial);
            trial.removePayoffs(moneyTransfer);
            moneyTransfer.setTrial(null);
        }
        proband.removeMoneyTransfers(moneyTransfer);
        moneyTransfer.setProband(null);
        if (bankAccount != null) {
            bankAccount.removeMoneyTransfers(moneyTransfer);
            moneyTransfer.setBankAccount(null);
        }
        moneyTransferDao.remove(moneyTransfer);
        JournalEntryDao journalEntryDao = this.getJournalEntryDao();
        if (trial != null) {
            logSystemMessage(trial, result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_DELETED, result, null, journalEntryDao);
        }
        ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_DELETED, result, null, journalEntryDao);
        return result;
    }

    /**
     * Deletes a proband. With defer (and no force) the proband is only marked for deletion with a
     * mandatory reason; otherwise the proband and all dependent records are removed for real.
     */
    @Override
    protected ProbandOutVO handleDeleteProband(AuthenticationVO auth, Long probandId, boolean defer, boolean force, String deferredDeleteReason, Integer maxInstances, Integer maxParentsDepth, Integer maxChildrenDepth) throws Exception {
        ProbandDao probandDao = this.getProbandDao();
        JournalEntryDao journalEntryDao = this.getJournalEntryDao();
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user =
CoreUtil.getUser();
        ProbandOutVO result;
        if (!force && defer) {
            // deferred delete: capture the pre-change snapshot, then flag the proband for deletion
            Proband originalProband = CheckIDUtil.checkProbandId(probandId, probandDao);
            ProbandOutVO original = probandDao.toProbandOutVO(originalProband, maxInstances, maxParentsDepth, maxChildrenDepth);
            if (original.getBlinded()) {
                // blinded probands may only be touched by users of the same department
                if (!user.getDepartment().getId().equals(originalProband.getDepartment().getId())) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_NOT_EQUAL_TO_USER_DEPARTMENT);
                }
            } else {
                if (!original.isDecrypted()) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
                }
            }
            // evict the snapshot entity, then re-load with a pessimistic write lock for the update
            probandDao.evict(originalProband);
            Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao, LockMode.PESSIMISTIC_WRITE);
            if (CommonUtil.isEmptyString(deferredDeleteReason)) {
                throw L10nUtil.initServiceException(ServiceExceptionCodes.DEFERRED_DELETE_REASON_REQUIRED);
            }
            proband.setDeferredDelete(true);
            proband.setDeferredDeleteReason(deferredDeleteReason);
            CoreUtil.modifyVersion(proband, proband.getVersion(), now, user); // no opt. locking
            probandDao.update(proband);
            result = probandDao.toProbandOutVO(proband, maxInstances, maxParentsDepth, maxChildrenDepth);
            ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_MARKED_FOR_DELETION, result, original, journalEntryDao);
            // journal the pending deletion on each parent as well
            Iterator<ProbandOutVO> parentsIt = original.getParents().iterator();
            while (parentsIt.hasNext()) {
                ProbandOutVO parent = parentsIt.next();
                ServiceUtil.logSystemMessage(probandDao.load(parent.getId()), result, now, user, SystemMessageCodes.PROBAND_MARKED_FOR_DELETION, result, original, journalEntryDao);
            }
        } else {
            // hard delete: remove the proband and all dependent records
            Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao, LockMode.PESSIMISTIC_WRITE);
            result = probandDao.toProbandOutVO(proband, maxInstances, maxParentsDepth, maxChildrenDepth);
            if (result.getBlinded()) {
                if (!user.getDepartment().getId().equals(result.getDepartment().getId())) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_NOT_EQUAL_TO_USER_DEPARTMENT);
                }
            } else {
                if (!result.isDecrypted()) {
                    throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
                }
            }
            ServiceUtil.removeProband(proband, result, true, user, now, this.getProbandDao(), this.getProbandContactParticularsDao(), this.getAnimalContactParticularsDao(),
                    journalEntryDao, this.getNotificationDao(), this.getNotificationRecipientDao(), this.getProbandTagValueDao(), this.getProbandContactDetailValueDao(),
                    this.getProbandAddressDao(), this.getProbandStatusEntryDao(), this.getDiagnosisDao(), this.getProcedureDao(), this.getMedicationDao(),
                    this.getInventoryBookingDao(), this.getMoneyTransferDao(), this.getBankAccountDao(), this.getProbandListStatusEntryDao(), this.getProbandListEntryDao(),
                    this.getProbandListEntryTagValueDao(), this.getInputFieldValueDao(), this.getInquiryValueDao(), this.getECRFFieldValueDao(),
                    this.getECRFFieldStatusEntryDao(), this.getSignatureDao(), this.getECRFStatusEntryDao(), this.getMassMailRecipientDao(), this.getJobDao(), this.getFileDao());
        }
        return result;
    }

    /**
     * Deletes a proband address; the wire-transfer address may only be deleted when it is the
     * proband's last remaining address. Requires a decryptable address and an unlocked proband.
     */
    @Override
    protected ProbandAddressOutVO handleDeleteProbandAddress(
            AuthenticationVO auth, Long probandAddressId) throws Exception {
        ProbandAddressDao addressDao = this.getProbandAddressDao();
        ProbandAddress address = CheckIDUtil.checkProbandAddressId(probandAddressId, addressDao);
        Proband proband = address.getProband();
        this.getProbandDao().lock(proband, LockMode.PESSIMISTIC_WRITE);
        ProbandAddressOutVO result = addressDao.toProbandAddressOutVO(address);
        if (!result.isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_ADDRESS);
        }
        ServiceUtil.checkProbandLocked(proband);
        if (address.isWireTransfer() && addressDao.getCount(address.getProband().getId(), null, null, null) > 1) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DELETE_WIRE_TRANSFER_PROBAND_ADDRESS);
        }
        proband.removeAddresses(address);
        address.setProband(null);
        addressDao.remove(address);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROBAND_ADDRESS_DELETED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Deletes a proband contact detail value. Requires a decryptable value and an unlocked proband.
     */
    @Override
    protected ProbandContactDetailValueOutVO handleDeleteProbandContactDetailValue(
            AuthenticationVO auth, Long probandContactDetailValueId) throws Exception {
        ProbandContactDetailValueDao contactValueDao = this.getProbandContactDetailValueDao();
        ProbandContactDetailValue contactValue = CheckIDUtil.checkProbandContactDetailValueId(probandContactDetailValueId, contactValueDao);
        Proband proband = contactValue.getProband();
        ProbandContactDetailValueOutVO result = contactValueDao.toProbandContactDetailValueOutVO(contactValue);
        if (!result.isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_CONTACT_DETAIL_VALUE);
        }
        ServiceUtil.checkProbandLocked(proband);
        proband.removeContactDetailValues(contactValue);
        contactValue.setProband(null);
        contactValueDao.remove(contactValue);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROBAND_CONTACT_DETAIL_VALUE_DELETED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Deletes a proband status entry together with its notifications. Requires a decryptable entry
     * and an unlocked proband.
     */
    @Override
    protected ProbandStatusEntryOutVO handleDeleteProbandStatusEntry(
            AuthenticationVO auth, Long probandStatusEntryId) throws Exception {
        ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao();
        ProbandStatusEntry statusEntry = CheckIDUtil.checkProbandStatusEntryId(probandStatusEntryId, statusEntryDao);
        Proband proband = statusEntry.getProband();
        ProbandStatusEntryOutVO result = statusEntryDao.toProbandStatusEntryOutVO(statusEntry);
        if (!result.isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_STATUS_ENTRY);
        }
        ServiceUtil.checkProbandLocked(proband);
        proband.removeStatusEntries(statusEntry);
        statusEntry.setProband(null);
        // cascade: drop notifications attached to this status entry
        ServiceUtil.removeNotifications(statusEntry.getNotifications(), this.getNotificationDao(), this.getNotificationRecipientDao());
        statusEntryDao.remove(statusEntry);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROBAND_STATUS_ENTRY_DELETED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Deletes a proband tag value. Requires a decryptable value and an unlocked proband.
     */
    @Override
    protected ProbandTagValueOutVO handleDeleteProbandTagValue(AuthenticationVO auth, Long probandTagValueId) throws Exception {
        ProbandTagValueDao tagValueDao = this.getProbandTagValueDao();
        ProbandTagValue tagValue = CheckIDUtil.checkProbandTagValueId(probandTagValueId, tagValueDao);
        Proband proband = tagValue.getProband();
        ProbandTagValueOutVO result = tagValueDao.toProbandTagValueOutVO(tagValue);
        if (!result.isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_TAG_VALUE);
        }
        ServiceUtil.checkProbandLocked(proband);
        proband.removeTagValues(tagValue);
        tagValue.setProband(null);
        tagValueDao.remove(tagValue);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROBAND_TAG_VALUE_DELETED, result, null, this.getJournalEntryDao());
        return result;
    }

    /**
     * Deletes a procedure; referencing medications are kept but detached from the procedure (with a
     * version bump and a journal entry each). Requires a decryptable procedure and an unlocked proband.
     */
    @Override
    protected ProcedureOutVO handleDeleteProcedure(AuthenticationVO auth, Long procedureId) throws Exception {
        ProcedureDao procedureDao = this.getProcedureDao();
        Procedure procedure = CheckIDUtil.checkProcedureId(procedureId, procedureDao);
        Proband proband = procedure.getProband();
        ProcedureOutVO result = procedureDao.toProcedureOutVO(procedure);
        if (!result.isDecrypted()) {
            throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROCEDURE);
        }
        ServiceUtil.checkProbandLocked(proband);
        Timestamp now = new Timestamp(System.currentTimeMillis());
        User user = CoreUtil.getUser();
        JournalEntryDao journalEntryDao = this.getJournalEntryDao();
        // unlink code and proband associations before removal
        OpsCode opsCode = procedure.getCode();
        opsCode.removeProcedures(procedure);
        procedure.setCode(null);
        proband.removeProcedures(procedure);
        procedure.setProband(null);
        MedicationDao medicationDao = this.getMedicationDao();
        // detach (not delete) medications referencing this procedure
        Iterator<Medication> medicationsIt = procedure.getMedications().iterator();
        while (medicationsIt.hasNext()) {
            Medication medication = medicationsIt.next();
            MedicationOutVO originalMedicationVO = medicationDao.toMedicationOutVO(medication);
            medication.setProcedure(null);
            CoreUtil.modifyVersion(medication, medication.getVersion(), now, user);
            medicationDao.update(medication);
            MedicationOutVO medicationVO = medicationDao.toMedicationOutVO(medication);
            ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROCEDURE_DELETED_MEDICATION_UPDATED, medicationVO, originalMedicationVO, journalEntryDao);
        }
        procedure.getMedications().clear();
        procedureDao.remove(procedure);
ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROCEDURE_DELETED, result, null, journalEntryDao); return result; } @Override protected ReimbursementsExcelVO handleExportReimbursements( AuthenticationVO auth, Long probandId, String costType, PaymentMethod method, Boolean paid) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao); if (!proband.isPerson()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PROBAND_NOT_PERSON); } ProbandOutVO probandVO = probandDao.toProbandOutVO(proband); MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao(); Collection<String> costTypes = moneyTransferDao.getCostTypes(null, null, null, probandVO.getId(), method); Collection<MoneyTransfer> moneyTransfers = moneyTransferDao.findByProbandTrialMethodCostTypePaidPerson(null, null, null, probandVO.getId(), method, costType, paid, null, null); ReimbursementsExcelVO result = ServiceUtil.createReimbursementsExcel(moneyTransfers, costTypes, null, probandVO, costType, method, paid, moneyTransferDao, this.getBankAccountDao(), this.getProbandAddressDao(), this.getAddressTypeDao(), this.getUserDao()); ServiceUtil.logSystemMessage(proband, result.getProband(), CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(), SystemMessageCodes.REIMBURSEMENTS_EXPORTED, result, null, this.getJournalEntryDao()); return result; } @Override protected VisitScheduleExcelVO handleExportVisitSchedule( AuthenticationVO auth, Long probandId, Long trialId) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao); ProbandOutVO probandVO = probandDao.toProbandOutVO(proband); TrialDao trialDao = this.getTrialDao(); TrialOutVO trialVO = null; if (trialId != null) { trialVO = trialDao.toTrialOutVO(CheckIDUtil.checkTrialId(trialId, trialDao)); } VisitScheduleExcelWriter.Styles 
style = trialVO == null ? VisitScheduleExcelWriter.Styles.PROBAND_VISIT_SCHEDULE : VisitScheduleExcelWriter.Styles.PROBAND_TRIAL_VISIT_SCHEDULE; VisitScheduleItemDao visitScheduleItemDao = this.getVisitScheduleItemDao(); Collection<VisitScheduleItem> visitScheduleItems; switch (style) { case PROBAND_VISIT_SCHEDULE: visitScheduleItems = visitScheduleItemDao.findByTrialGroupVisitProbandTravel(null, null, null, probandVO.getId(), null, null); break; case PROBAND_TRIAL_VISIT_SCHEDULE: visitScheduleItems = visitScheduleItemDao.findByTrialGroupVisitProbandTravel(trialVO.getId(), null, null, probandVO.getId(), null, null); break; default: visitScheduleItems = null; } VisitScheduleExcelVO result = ServiceUtil.creatVisitScheduleExcel(visitScheduleItems, style, probandVO, trialVO, visitScheduleItemDao, this.getProbandListStatusEntryDao(), this.getProbandAddressDao(), this.getUserDao()); switch (style) { case PROBAND_VISIT_SCHEDULE: ServiceUtil.logSystemMessage(proband, result.getProband(), CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(), SystemMessageCodes.VISIT_SCHEDULE_EXPORTED, result, null, this.getJournalEntryDao()); break; case PROBAND_TRIAL_VISIT_SCHEDULE: ServiceUtil.logSystemMessage(proband, trialVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(), SystemMessageCodes.VISIT_SCHEDULE_EXPORTED, result, null, this.getJournalEntryDao()); break; default: } return result; } @Override protected Collection<ProbandOutVO> handleGetAutoDeletionProbands( AuthenticationVO auth, Date today, Long departmentId, Long probandCategoryId, VariablePeriod reminderPeriod, Long reminderPeriodDays, PSFVO psf) throws Exception { if (departmentId != null) { CheckIDUtil.checkDepartmentId(departmentId, this.getDepartmentDao()); } if (probandCategoryId != null) { CheckIDUtil.checkProbandCategoryId(probandCategoryId, this.getProbandCategoryDao()); } ServiceUtil.checkReminderPeriod(reminderPeriod, reminderPeriodDays); ProbandDao probandDao = 
this.getProbandDao();
	Collection autoDeletionProbands = probandDao.findToBeAutoDeleted(today, departmentId, probandCategoryId, reminderPeriod, reminderPeriodDays, null, true, psf);
	// the DAO converts the entities to OutVOs in place; the same collection is returned
	probandDao.toProbandOutVOCollection(autoDeletionProbands);
	return autoDeletionProbands;
}

/**
 * Loads a single bank account by ID.
 *
 * @param auth          credentials of the calling user
 * @param bankAccountId ID of the bank account (validated via CheckIDUtil)
 * @return the bank account as OutVO
 * @throws Exception if the ID is unknown
 */
@Override
protected BankAccountOutVO handleGetBankAccount(AuthenticationVO auth, Long bankAccountId) throws Exception {
	BankAccountDao bankAccountDao = this.getBankAccountDao();
	BankAccount bankAccount = CheckIDUtil.checkBankAccountId(bankAccountId, bankAccountDao);
	BankAccountOutVO result = bankAccountDao.toBankAccountOutVO(bankAccount);
	return result;
}

/**
 * Counts the bank accounts of a proband; a null probandId counts across all probands.
 *
 * @param auth      credentials of the calling user
 * @param probandId optional proband ID filter (validated when non-null)
 * @return number of matching bank accounts
 * @throws Exception if the proband ID is unknown
 */
@Override
protected long handleGetBankAccountCount(AuthenticationVO auth, Long probandId) throws Exception {
	if (probandId != null) {
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
	}
	return this.getBankAccountDao().getCount(probandId);
}

/**
 * Lists the bank accounts of a proband, paged/sorted/filtered via psf.
 *
 * @param auth      credentials of the calling user
 * @param probandId optional proband ID filter (validated when non-null)
 * @param psf       paging/sorting/filter settings, may be null
 * @return matching bank accounts as OutVOs
 * @throws Exception if the proband ID is unknown
 */
@Override
protected Collection<BankAccountOutVO> handleGetBankAccountList(
		AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
	if (probandId != null) {
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
	}
	BankAccountDao bankAccountDao = this.getBankAccountDao();
	Collection bankAccounts = bankAccountDao.findByProband(probandId, null, null, psf);
	// in-place entity -> VO conversion
	bankAccountDao.toBankAccountOutVOCollection(bankAccounts);
	return bankAccounts;
}

/**
 * Lists bank accounts filtered by proband, active flag and/or a specific account ID.
 * All non-null ID arguments are validated before querying.
 *
 * @param auth          credentials of the calling user
 * @param probandId     optional proband ID filter
 * @param active        optional active-flag filter
 * @param bankAccountId optional bank account ID filter
 * @return matching bank accounts as OutVOs
 * @throws Exception if a supplied ID is unknown
 */
@Override
protected Collection<BankAccountOutVO> handleGetBankAccounts(
		AuthenticationVO auth, Long probandId, Boolean active, Long bankAccountId) throws Exception {
	if (probandId != null) {
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
	}
	BankAccountDao bankAccountDao = this.getBankAccountDao();
	if (bankAccountId != null) {
		CheckIDUtil.checkBankAccountId(bankAccountId, bankAccountDao);
	}
	Collection bankAccounts = bankAccountDao.findByProbandActiveId(probandId, active, bankAccountId);
	bankAccountDao.toBankAccountOutVOCollection(bankAccounts);
	return bankAccounts;
}

@Override
protected Collection<InventoryBookingOutVO>
handleGetCollidingProbandInventoryBookings( AuthenticationVO auth, Long probandStatusEntryId, Boolean isRelevantForProbandAppointments) throws Exception { ProbandStatusEntry probandStatus = CheckIDUtil.checkProbandStatusEntryId(probandStatusEntryId, this.getProbandStatusEntryDao()); Collection collidingInventoryBookings; if (!probandStatus.getType().isProbandActive()) { InventoryBookingDao inventoryBookingDao = this.getInventoryBookingDao(); collidingInventoryBookings = inventoryBookingDao.findByProbandCalendarInterval(probandStatus.getProband().getId(), null, probandStatus.getStart(), probandStatus.getStop(), isRelevantForProbandAppointments); inventoryBookingDao.toInventoryBookingOutVOCollection(collidingInventoryBookings); } else { collidingInventoryBookings = new ArrayList<InventoryBookingOutVO>(); } return collidingInventoryBookings; } @Override protected Collection<VisitScheduleItemOutVO> handleGetCollidingVisitScheduleItems( AuthenticationVO auth, Long probandStatusEntryId, boolean allProbandGroups) throws Exception { ProbandStatusEntry probandStatusEntry = CheckIDUtil.checkProbandStatusEntryId(probandStatusEntryId, this.getProbandStatusEntryDao()); if (!probandStatusEntry.getType().isProbandActive()) { Collection collidingVisitScheduleItems = new HashSet(); VisitScheduleItemDao visitScheduleItemDao = this.getVisitScheduleItemDao(); Iterator<ProbandListEntry> trialParticipationsIt = probandStatusEntry.getProband().getTrialParticipations().iterator(); while (trialParticipationsIt.hasNext()) { ProbandListEntry probandListEntry = trialParticipationsIt.next(); ProbandGroup probandGroup = probandListEntry.getGroup(); if (probandGroup != null) { collidingVisitScheduleItems .addAll(visitScheduleItemDao.findByInterval(probandListEntry.getTrial().getId(), probandGroup.getId(), probandStatusEntry.getStart(), probandStatusEntry.getStop())); } else { if (allProbandGroups) { 
collidingVisitScheduleItems.addAll(visitScheduleItemDao.findByInterval(probandListEntry.getTrial().getId(), null, probandStatusEntry.getStart(), probandStatusEntry.getStop())); } } } visitScheduleItemDao.toVisitScheduleItemOutVOCollection(collidingVisitScheduleItems); return new ArrayList<VisitScheduleItemOutVO>(collidingVisitScheduleItems); } else { return new ArrayList<VisitScheduleItemOutVO>(); } } @Override protected Collection<String> handleGetCostTypes(AuthenticationVO auth, Long trialDepartmentId, Long trialId, Long probandDepartmentId, Long probandId, PaymentMethod method) throws Exception { if (trialDepartmentId != null) { CheckIDUtil.checkDepartmentId(trialDepartmentId, this.getDepartmentDao()); } if (probandDepartmentId != null) { CheckIDUtil.checkDepartmentId(probandDepartmentId, this.getDepartmentDao()); } if (trialId != null) { CheckIDUtil.checkTrialId(trialId, this.getTrialDao()); } if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } return this.getMoneyTransferDao().getCostTypes(trialDepartmentId, trialId, probandDepartmentId, probandId, method); } @Override protected DiagnosisOutVO handleGetDiagnosis(AuthenticationVO auth, Long diagnosisId) throws Exception { DiagnosisDao diagnosisDao = this.getDiagnosisDao(); Diagnosis diagnosis = CheckIDUtil.checkDiagnosisId(diagnosisId, diagnosisDao); DiagnosisOutVO result = diagnosisDao.toDiagnosisOutVO(diagnosis); return result; } @Override protected long handleGetDiagnosisCount( AuthenticationVO auth, Long probandId) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } return this.getDiagnosisDao().getCount(probandId); } @Override protected Collection<DiagnosisOutVO> handleGetDiagnosisList( AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } DiagnosisDao diagnosisDao = this.getDiagnosisDao(); Collection diagnoses = 
diagnosisDao.findByProband(probandId, psf); diagnosisDao.toDiagnosisOutVOCollection(diagnoses); return diagnoses; } @Override protected long handleGetInquiryCount(AuthenticationVO auth, Long trialId, Boolean active, Boolean activeSignup) throws Exception { if (trialId != null) { CheckIDUtil.checkTrialId(trialId, this.getTrialDao()); } return this.getInquiryDao().getCount(trialId, active, activeSignup); } @Override protected long handleGetInquiryCount(AuthenticationVO auth, Long trialId, String category, Boolean active, Boolean activeSignup) throws Exception { if (trialId != null) { CheckIDUtil.checkTrialId(trialId, this.getTrialDao()); } return this.getInquiryDao().getCount(trialId, category, active, activeSignup); } @Override protected Collection<InquiryValueOutVO> handleGetInquiryInputFieldValues( AuthenticationVO auth, Long trialId, Boolean active, Boolean activeSignup, Long probandId, Long inputFieldId) throws Exception { if (trialId != null) { CheckIDUtil.checkTrialId(trialId, this.getTrialDao()); } CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); CheckIDUtil.checkInputFieldId(inputFieldId, this.getInputFieldDao()); InquiryValueDao inquiryValueDao = this.getInquiryValueDao(); Collection inquiryFieldValues = inquiryValueDao.findByTrialActiveProbandField(trialId, active, activeSignup, probandId, inputFieldId); inquiryValueDao.toInquiryValueOutVOCollection(inquiryFieldValues); return inquiryFieldValues; } @Override protected Collection<TrialOutVO> handleGetInquiryTrials(AuthenticationVO auth, Long probandId, Boolean active, Boolean activeSignup) throws Exception { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); TrialDao trialDao = this.getTrialDao(); Collection trials = trialDao.findByInquiryValuesProbandSorted(null, probandId, active, activeSignup); trialDao.toTrialOutVOCollection(trials); return trials; } @Override protected InquiryValuesOutVO handleGetInquiryValue(AuthenticationVO auth, Long probandId, Long inquiryId) throws Exception 
{ ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao); InquiryDao inquiryDao = this.getInquiryDao(); Inquiry inquiry = CheckIDUtil.checkInquiryId(inquiryId, inquiryDao); InquiryValueDao inquiryValueDao = this.getInquiryValueDao(); InquiryValuesOutVO result = new InquiryValuesOutVO(); Iterator<InquiryValue> it = inquiryValueDao.findByProbandInquiry(probandId, inquiryId).iterator(); if (it.hasNext()) { InquiryValue inquiryValue = it.next(); result.getPageValues().add(inquiryValueDao.toInquiryValueOutVO(inquiryValue)); if (!CommonUtil.isEmptyString(inquiryValue.getInquiry().getJsVariableName()) && Settings.getBoolean(SettingCodes.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION, Bundle.SETTINGS, DefaultSettings.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION)) { result.getJsValues().add(inquiryValueDao.toInquiryValueJsonVO(inquiryValue)); } } else { result.getPageValues().add( ServiceUtil.createPresetInquiryOutValue(probandDao.toProbandOutVO(proband), inquiryDao.toInquiryOutVO(inquiry), null)); if (!CommonUtil.isEmptyString(inquiry.getJsVariableName()) && Settings.getBoolean(SettingCodes.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION, Bundle.SETTINGS, DefaultSettings.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION)) { result.getJsValues().add(ServiceUtil.createPresetInquiryJsonValue(inquiry, this.getInputFieldSelectionSetValueDao())); } } return result; } @Override protected InquiryValueOutVO handleGetInquiryValueById(AuthenticationVO auth, Long inquiryValueId) throws Exception { InquiryValueDao inquiryValueDao = this.getInquiryValueDao(); return inquiryValueDao.toInquiryValueOutVO(CheckIDUtil.checkInquiryValueId(inquiryValueId, inquiryValueDao)); } @Override protected long handleGetInquiryValueCount(AuthenticationVO auth, Long trialId, Boolean active, Boolean activeSignup, Long probandId) throws Exception { Trial trial = CheckIDUtil.checkTrialId(trialId, this.getTrialDao()); 
CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); return this.getInquiryValueDao().getCount(trialId, active, activeSignup, probandId); } @Override protected long handleGetInquiryValueCount(AuthenticationVO auth, Long trialId, String category, Boolean active, Boolean activeSignup, Long probandId) throws Exception { Trial trial = CheckIDUtil.checkTrialId(trialId, this.getTrialDao()); CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); return this.getInquiryValueDao().getCount(trialId, category, active, activeSignup, probandId); } @Override protected InquiryValuesOutVO handleGetInquiryValues( AuthenticationVO auth, Long trialId, Boolean active, Boolean activeSignup, Long probandId, boolean sort, boolean loadAllJsValues, PSFVO psf) throws Exception { Trial trial = CheckIDUtil.checkTrialId(trialId, this.getTrialDao()); ProbandDao probandDao = this.getProbandDao(); ProbandOutVO probandVO = probandDao.toProbandOutVO(CheckIDUtil.checkProbandId(probandId, probandDao)); return ServiceUtil.getInquiryValues(trial, probandVO, active, activeSignup, Settings.getBoolean(SettingCodes.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION, Bundle.SETTINGS, DefaultSettings.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION), loadAllJsValues, sort, psf, this.getInquiryDao(), this.getInquiryValueDao(), this.getInputFieldSelectionSetValueDao()); } @Override protected InquiryValuesOutVO handleGetInquiryValues( AuthenticationVO auth, Long trialId, String category, Boolean active, Boolean activeSignup, Long probandId, boolean sort, boolean loadAllJsValues, PSFVO psf) throws Exception { Trial trial = CheckIDUtil.checkTrialId(trialId, this.getTrialDao()); ProbandDao probandDao = this.getProbandDao(); ProbandOutVO probandVO = probandDao.toProbandOutVO(CheckIDUtil.checkProbandId(probandId, probandDao)); return getInquiryValues(trial, category, probandVO, active, activeSignup, Settings.getBoolean(SettingCodes.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION, Bundle.SETTINGS, 
DefaultSettings.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION), loadAllJsValues, sort, psf); } @Override protected MedicationOutVO handleGetMedication(AuthenticationVO auth, Long medicationId) throws Exception { MedicationDao medicationDao = this.getMedicationDao(); Medication medication = CheckIDUtil.checkMedicationId(medicationId, medicationDao); MedicationOutVO result = medicationDao.toMedicationOutVO(medication); return result; } @Override protected long handleGetMedicationCount(AuthenticationVO auth, Long probandId, Long diagnosisId, Long procedureId) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } if (diagnosisId != null) { CheckIDUtil.checkDiagnosisId(diagnosisId, this.getDiagnosisDao()); } if (procedureId != null) { CheckIDUtil.checkProcedureId(procedureId, this.getProcedureDao()); } return this.getMedicationDao().getCount(probandId, diagnosisId, procedureId); } @Override protected Collection<MedicationOutVO> handleGetMedicationList(AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } MedicationDao medicationDao = this.getMedicationDao(); Collection medications = medicationDao.findByProband(probandId, psf); medicationDao.toMedicationOutVOCollection(medications); return medications; } @Override protected MoneyTransferOutVO handleGetMoneyTransfer(AuthenticationVO auth, Long moneyTransferId) throws Exception { MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao(); MoneyTransfer moneyTransfer = CheckIDUtil.checkMoneyTransferId(moneyTransferId, moneyTransferDao); MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer); return result; } @Override protected long handleGetMoneyTransferCount(AuthenticationVO auth, Long probandId, Long bankAccountId) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } if 
(bankAccountId != null) {
		CheckIDUtil.checkBankAccountId(bankAccountId, this.getBankAccountDao());
	}
	return this.getMoneyTransferDao().getCount(null, probandId, bankAccountId, null, null, null);
}

/**
 * Lists the money transfers of a proband, paged/sorted/filtered via psf.
 *
 * @param auth      credentials of the calling user
 * @param probandId optional proband ID filter (validated when non-null)
 * @param psf       paging/sorting/filter settings, may be null
 * @return matching money transfers as OutVOs
 * @throws Exception if the proband ID is unknown
 */
@Override
protected Collection<MoneyTransferOutVO> handleGetMoneyTransferList(
		AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception {
	if (probandId != null) {
		CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
	}
	MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
	Collection moneyTransfers = moneyTransferDao.findByProbandTrialMethodCostTypePaidPerson(null, null, null, probandId, null, null, null, null, psf);
	// in-place entity -> VO conversion
	moneyTransferDao.toMoneyTransferOutVOCollection(moneyTransfers);
	return moneyTransfers;
}

/**
 * NOTE(review): unimplemented auto-generated stub — always returns null.
 * Callers receive no payment reference; confirm whether this is still intended
 * or an implementation is missing.
 */
@Override
protected String handleGetNewPaymentReference(AuthenticationVO auth, MoneyTransferInVO newMoneyTransfer) throws Exception {
	// TODO Auto-generated method stub
	return null;
}

/**
 * Lists the trials a proband participates in, sorted by the DAO finder.
 *
 * @param auth      credentials of the calling user
 * @param probandId proband ID (validated)
 * @return participating trials as OutVOs
 * @throws Exception if the proband ID is unknown
 */
@Override
protected Collection<TrialOutVO> handleGetParticipationTrials(AuthenticationVO auth, Long probandId) throws Exception {
	CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
	TrialDao trialDao = this.getTrialDao();
	Collection trials = trialDao.findByParticipatingProbandSorted(probandId);
	trialDao.toTrialOutVOCollection(trials);
	return trials;
}

/**
 * Loads a single proband, limiting VO graph expansion via the max* arguments.
 *
 * @param auth             credentials of the calling user
 * @param probandId        proband ID (validated)
 * @param maxInstances     limit for repeated VO instances in the graph
 * @param maxParentsDepth  limit for parent-proband recursion depth
 * @param maxChildrenDepth limit for child-proband recursion depth
 * @return the proband as OutVO
 * @throws Exception if the proband ID is unknown
 * @see org.phoenixctms.ctsms.service.proband.ProbandService#getProband(Long)
 */
@Override
protected ProbandOutVO handleGetProband(AuthenticationVO auth, Long probandId, Integer maxInstances, Integer maxParentsDepth, Integer maxChildrenDepth) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
	ProbandOutVO result = probandDao.toProbandOutVO(proband, maxInstances, maxParentsDepth, maxChildrenDepth);
	return result;
}

@Override
protected ProbandAddressOutVO handleGetProbandAddress(AuthenticationVO auth, Long probandAddressId) throws Exception {
	ProbandAddressDao addressDao = this.getProbandAddressDao();
	ProbandAddress address =
CheckIDUtil.checkProbandAddressId(probandAddressId, addressDao); ProbandAddressOutVO result = addressDao.toProbandAddressOutVO(address); return result; } @Override protected long handleGetProbandAddressCount( AuthenticationVO auth, Long probandId) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } return this.getProbandAddressDao().getCount(probandId, null, null, null); } @Override protected Collection<ProbandAddressOutVO> handleGetProbandAddressList( AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } ProbandAddressDao addressDao = this.getProbandAddressDao(); Collection probandAddresses = addressDao.findByProband(probandId, null, null, null, psf); addressDao.toProbandAddressOutVOCollection(probandAddresses); return probandAddresses; } @Override protected ProbandContactDetailValueOutVO handleGetProbandContactDetailValue( AuthenticationVO auth, Long probandContactDetailValueId) throws Exception { ProbandContactDetailValueDao contactValueDao = this.getProbandContactDetailValueDao(); ProbandContactDetailValue contactValue = CheckIDUtil.checkProbandContactDetailValueId(probandContactDetailValueId, contactValueDao); ProbandContactDetailValueOutVO result = contactValueDao.toProbandContactDetailValueOutVO(contactValue); return result; } @Override protected long handleGetProbandContactDetailValueCount( AuthenticationVO auth, Long probandId, Boolean na) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } return this.getProbandContactDetailValueDao().getCount(probandId, null, na, null, null); } @Override protected Collection<ProbandContactDetailValueOutVO> handleGetProbandContactDetailValueList( AuthenticationVO auth, Long probandId, Boolean na, PSFVO psf) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } 
ProbandContactDetailValueDao contactValueDao = this.getProbandContactDetailValueDao(); Collection probandContactValues = contactValueDao.findByProband(probandId, null, na, null, null, psf); contactValueDao.toProbandContactDetailValueOutVOCollection(probandContactValues); return probandContactValues; } @Override protected Collection<ProbandGroupOutVO> handleGetProbandGroupList( AuthenticationVO auth, Long probandId) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } ProbandGroupDao probandGroupDao = this.getProbandGroupDao(); Collection probandGroups = probandGroupDao.findByProbandSorted(probandId); probandGroupDao.toProbandGroupOutVOCollection(probandGroups); return probandGroups; } @Override protected ProbandImageOutVO handleGetProbandImage(AuthenticationVO auth, Long probandId) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao); ProbandImageOutVO result = probandDao.toProbandImageOutVO(proband); return result; } @Override protected long handleGetProbandInventoryBookingCount( AuthenticationVO auth, Long probandId) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } return this.getInventoryBookingDao().getCount(null, probandId, null, null, null); } @Override protected Collection<InventoryBookingOutVO> handleGetProbandInventoryBookingList( AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } InventoryBookingDao inventoryBookingDao = this.getInventoryBookingDao(); Collection inventoryBookings = inventoryBookingDao.findByProband(probandId, psf); inventoryBookingDao.toInventoryBookingOutVOCollection(inventoryBookings); return inventoryBookings; } @Override protected Collection<ProbandOutVO> handleGetProbandList(AuthenticationVO auth, Long probandId, Long departmentId, Integer 
maxInstances, PSFVO psf) throws Exception { ProbandDao probandDao = this.getProbandDao(); if (probandId != null) { CheckIDUtil.checkProbandId(probandId, probandDao); } if (departmentId != null) { CheckIDUtil.checkDepartmentId(departmentId, this.getDepartmentDao()); } Collection probands = probandDao.findByIdDepartment(probandId, departmentId, psf); ArrayList<ProbandOutVO> result = new ArrayList<ProbandOutVO>(probands.size()); Iterator<Proband> probandIt = probands.iterator(); while (probandIt.hasNext()) { result.add(probandDao.toProbandOutVO(probandIt.next(), maxInstances)); } return result; } @Override protected Collection<ProbandStatusEntryOutVO> handleGetProbandStatus( AuthenticationVO auth, Date now, Long probandId, Long departmentId, Long probandCategoryId, Boolean probandActive, Boolean hideAvailability, PSFVO psf) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } if (departmentId != null) { CheckIDUtil.checkDepartmentId(departmentId, this.getDepartmentDao()); } if (probandCategoryId != null) { CheckIDUtil.checkProbandCategoryId(probandCategoryId, this.getProbandCategoryDao()); } ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao(); Collection probandStatusEntries = statusEntryDao.findProbandStatus(CommonUtil.dateToTimestamp(now), probandId, departmentId, probandCategoryId, probandActive, hideAvailability, psf); statusEntryDao.toProbandStatusEntryOutVOCollection(probandStatusEntries); return probandStatusEntries; } @Override protected ProbandStatusEntryOutVO handleGetProbandStatusEntry( AuthenticationVO auth, Long probandStatusEntryId) throws Exception { ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao(); ProbandStatusEntry statusEntry = CheckIDUtil.checkProbandStatusEntryId(probandStatusEntryId, statusEntryDao); ProbandStatusEntryOutVO result = statusEntryDao.toProbandStatusEntryOutVO(statusEntry); return result; } @Override protected long 
handleGetProbandStatusEntryCount(AuthenticationVO auth, Long probandId) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } return this.getProbandStatusEntryDao().getCount(probandId); } @Override protected Collection<ProbandStatusEntryOutVO> handleGetProbandStatusEntryInterval(AuthenticationVO auth, Long departmentId, Long probandCategoryId, Boolean hideAvailability, Date from, Date to, boolean sort) throws Exception { if (departmentId != null) { CheckIDUtil.checkDepartmentId(departmentId, this.getDepartmentDao()); } if (probandCategoryId != null) { CheckIDUtil.checkProbandCategoryId(probandCategoryId, this.getProbandCategoryDao()); } ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao(); Collection probandStatusEntries = statusEntryDao.findByDepartmentCategoryInterval(departmentId, probandCategoryId, CommonUtil.dateToTimestamp(from), CommonUtil.dateToTimestamp(to), null, null, hideAvailability); statusEntryDao.toProbandStatusEntryOutVOCollection(probandStatusEntries); if (sort) { probandStatusEntries = new ArrayList(probandStatusEntries); Collections.sort((ArrayList) probandStatusEntries, new ProbandStatusEntryIntervalComparator(false)); } return probandStatusEntries; } @Override protected Collection<ProbandStatusEntryOutVO> handleGetProbandStatusEntryList( AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao(); Collection probandStatusEntries = statusEntryDao.findByProband(probandId, psf); statusEntryDao.toProbandStatusEntryOutVOCollection(probandStatusEntries); return probandStatusEntries; } @Override protected ProbandTagValueOutVO handleGetProbandTagValue(AuthenticationVO auth, Long probandTagValueId) throws Exception { ProbandTagValueDao tagValueDao = this.getProbandTagValueDao(); ProbandTagValue tagValue = 
CheckIDUtil.checkProbandTagValueId(probandTagValueId, tagValueDao); ProbandTagValueOutVO result = tagValueDao.toProbandTagValueOutVO(tagValue); return result; } @Override protected long handleGetProbandTagValueCount(AuthenticationVO auth, Long probandId) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } return this.getProbandTagValueDao().getCount(probandId); } @Override protected Collection<ProbandTagValueOutVO> handleGetProbandTagValueList( AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } ProbandTagValueDao tagValueDao = this.getProbandTagValueDao(); Collection probandTagValues = tagValueDao.findByProband(probandId, psf); tagValueDao.toProbandTagValueOutVOCollection(probandTagValues); return probandTagValues; } @Override protected ProcedureOutVO handleGetProcedure(AuthenticationVO auth, Long procedureId) throws Exception { ProcedureDao procedureDao = this.getProcedureDao(); Procedure procedure = CheckIDUtil.checkProcedureId(procedureId, procedureDao); ProcedureOutVO result = procedureDao.toProcedureOutVO(procedure); return result; } @Override protected long handleGetProcedureCount( AuthenticationVO auth, Long probandId) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } return this.getProcedureDao().getCount(probandId); } @Override protected Collection<ProcedureOutVO> handleGetProcedureList( AuthenticationVO auth, Long probandId, PSFVO psf) throws Exception { if (probandId != null) { CheckIDUtil.checkProbandId(probandId, this.getProbandDao()); } ProcedureDao procedureDao = this.getProcedureDao(); Collection procedures = procedureDao.findByProband(probandId, psf); procedureDao.toProcedureOutVOCollection(procedures); return procedures; } @Override protected Collection<TrialOutVO> handleGetReimbursementTrials(AuthenticationVO auth, Long probandId, 
String costType, PaymentMethod method, Boolean paid) throws Exception {
	CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
	TrialDao trialDao = this.getTrialDao();
	Collection trials = trialDao.findByReimbursementProbandSorted(probandId, method, costType, paid);
	trialDao.toTrialOutVOCollection(trials);
	return trials;
}

// Returns the proband address flagged for wire-transfer payments, as an out-VO.
@Override
protected ProbandAddressOutVO handleGetWireTransferProbandAddress(AuthenticationVO auth, Long probandId) throws Exception {
	CheckIDUtil.checkProbandId(probandId, this.getProbandDao());
	ProbandAddressDao probandAddressDao = this.getProbandAddressDao();
	return probandAddressDao.toProbandAddressOutVO(probandAddressDao.findByProbandWireTransfer(probandId));
}

// Renders the inquiry form PDF for one proband; when trialId is given only that
// trial is rendered, otherwise all trials with matching inquiry values.
// A journal entry is written for the trial (if given) and for the proband.
@Override
protected InquiriesPDFVO handleRenderInquiries(AuthenticationVO auth, Long trialId, Long probandId, Boolean active, Boolean activeSignup, boolean blank) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
	ProbandOutVO probandVO = probandDao.toProbandOutVO(proband);
	TrialDao trialDao = this.getTrialDao();
	Trial trial = null;
	TrialOutVO trialVO = null;
	Collection<Trial> trials = new ArrayList<Trial>();
	if (trialId != null) {
		trial = CheckIDUtil.checkTrialId(trialId, trialDao);
		trialVO = trialDao.toTrialOutVO(trial);
		trials.add(trial);
	} else {
		trials = trialDao.findByInquiryValuesProbandSorted(null, probandId, active, activeSignup);
	}
	InquiriesPDFVO result = ServiceUtil.renderInquiries(proband, probandVO, trials, active, activeSignup, blank, this.getTrialDao(), this.getInquiryDao(),
			this.getInquiryValueDao(), this.getInputFieldDao(), this.getInputFieldSelectionSetValueDao(), this.getUserDao());
	JournalEntryDao journalEntryDao = this.getJournalEntryDao();
	if (trial != null) {
		ServiceUtil.logSystemMessage(trial, probandVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(),
				SystemMessageCodes.INQUIRY_PDF_RENDERED, result, null, journalEntryDao);
	}
	ServiceUtil.logSystemMessage(proband, trialVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(),
			trial != null ? SystemMessageCodes.INQUIRY_PDF_RENDERED : SystemMessageCodes.INQUIRIES_PDF_RENDERED, result, null, journalEntryDao);
	return result;
}

// Renders the signup inquiry PDF for a proband: collects the signup trials of
// the (optional) department for which the proband actually has inquiry values.
@Override
protected InquiriesPDFVO handleRenderInquiriesSignup(AuthenticationVO auth, Long departmentId, Long probandId, Boolean activeSignup) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
	ProbandOutVO probandVO = probandDao.toProbandOutVO(proband);
	Department department = null;
	if (departmentId != null) {
		department = CheckIDUtil.checkDepartmentId(departmentId, this.getDepartmentDao());
	}
	Collection<Trial> trials = new ArrayList<Trial>();
	Iterator<Trial> trialIt = this.getTrialDao().findBySignup(department != null ? department.getId() : null, true, null).iterator();
	while (trialIt.hasNext()) {
		Trial trial = trialIt.next();
		// include only trials where this proband has at least one inquiry value
		if (this.getInquiryValueDao().getCount(trial.getId(), null, activeSignup, proband.getId()) > 0) {
			trials.add(trial);
		}
	}
	InquiriesPDFVO result = ServiceUtil.renderInquiries(proband, probandVO, trials, null, activeSignup, false, this.getTrialDao(), this.getInquiryDao(),
			this.getInquiryValueDao(), this.getInputFieldDao(), this.getInputFieldSelectionSetValueDao(), this.getUserDao());
	ServiceUtil.logSystemMessage(proband, (TrialOutVO) null, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(),
			SystemMessageCodes.INQUIRIES_SIGNUP_PDF_RENDERED, result, null, this.getJournalEntryDao());
	return result;
}

// Renders the inquiry PDF of a single trial for one proband; logs to both the
// trial and the proband journal. NOTE(review): the 'category' parameter is not
// referenced in the visible body — confirm whether it is intentionally ignored.
@Override
protected InquiriesPDFVO handleRenderInquiry(AuthenticationVO auth, Long trialId, String category, Long probandId, Boolean active, Boolean activeSignup, boolean blank)
		throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
	ProbandOutVO probandVO = probandDao.toProbandOutVO(proband);
TrialDao trialDao = this.getTrialDao();
	Trial trial = CheckIDUtil.checkTrialId(trialId, trialDao);
	TrialOutVO trialVO = trialDao.toTrialOutVO(trial);
	Collection<Trial> trials = new ArrayList<Trial>();
	trials.add(trial);
	InquiriesPDFVO result = ServiceUtil.renderInquiries(proband, probandVO, trials, active, activeSignup, blank, this.getTrialDao(), this.getInquiryDao(),
			this.getInquiryValueDao(), this.getInputFieldDao(), this.getInputFieldSelectionSetValueDao(), this.getUserDao());
	JournalEntryDao journalEntryDao = this.getJournalEntryDao();
	ServiceUtil.logSystemMessage(trial, probandVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(),
			SystemMessageCodes.INQUIRY_PDF_RENDERED, result, null, journalEntryDao);
	ServiceUtil.logSystemMessage(proband, trialVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), CoreUtil.getUser(),
			SystemMessageCodes.INQUIRY_PDF_RENDERED, result, null, journalEntryDao);
	return result;
}

// Renders a proband letter PDF for one address; rejected for animal probands.
@Override
protected ProbandLetterPDFVO handleRenderProbandLetterPDF(
		AuthenticationVO auth, Long probandAddressId) throws Exception {
	ProbandAddressDao probandAddressDao = this.getProbandAddressDao();
	ProbandAddress address = CheckIDUtil.checkProbandAddressId(probandAddressId, probandAddressDao);
	if (!address.getProband().isPerson()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_LETTER_NOT_FOR_ANIMAL_ENTRIES);
	}
	ProbandAddressOutVO addressVO = probandAddressDao.toProbandAddressOutVO(address);
	ProbandLetterPDFPainter painter = ServiceUtil.createProbandLetterPDFPainter(addressVO);
	User user = CoreUtil.getUser();
	painter.getPdfVO().setRequestingUser(this.getUserDao().toUserOutVO(user));
	(new PDFImprinter(painter, painter)).render();
	ProbandLetterPDFVO result = painter.getPdfVO();
	logSystemMessage(address.getProband(), addressVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), user,
			SystemMessageCodes.PROBAND_ADDRESS_PROBAND_LETTER_PDF_RENDERED, result, null, this.getJournalEntryDao());
	return result;
}

// Renders proband letter PDFs for one (person) proband; addresses are resolved
// by the painter via the proband address dao.
@Override
protected ProbandLetterPDFVO handleRenderProbandLettersPDF(
		AuthenticationVO auth, Long probandId) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
	if (!proband.isPerson()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_LETTER_NOT_FOR_ANIMAL_ENTRIES);
	}
	ArrayList<ProbandOutVO> probandVOs = new ArrayList<ProbandOutVO>();
	ProbandOutVO probandVO = probandDao.toProbandOutVO(proband);
	probandVOs.add(probandVO);
	ProbandLetterPDFPainter painter = ServiceUtil.createProbandLetterPDFPainter(probandVOs, this.getProbandAddressDao());
	User user = CoreUtil.getUser();
	painter.getPdfVO().setRequestingUser(this.getUserDao().toUserOutVO(user));
	(new PDFImprinter(painter, painter)).render();
	ProbandLetterPDFVO result = painter.getPdfVO();
	ServiceUtil.logSystemMessage(proband, probandVO, CommonUtil.dateToTimestamp(result.getContentTimestamp()), user,
			SystemMessageCodes.PROBAND_LETTER_PDF_RENDERED, result, null, this.getJournalEntryDao());
	return result;
}

// Restarts the auto-delete deadline of a proband (optimistic version check) and
// runs the auto-delete expiry notification check.
@Override
protected ProbandOutVO handleResetAutoDeleteDeadline(
		AuthenticationVO auth, Long probandId, Long version) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
	ProbandOutVO original = probandDao.toProbandOutVO(proband);
	ServiceUtil.checkProbandLocked(proband);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(proband, version.longValue(), now, user);
	ServiceUtil.resetAutoDeleteDeadline(proband, now);
	probandDao.update(proband);
	ServiceUtil.notifyExpiringProbandAutoDelete(proband, now, this.getNotificationDao());
	ProbandOutVO result = probandDao.toProbandOutVO(proband);
	ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_AUTO_DELETE_DEADLINE_RESET, result, original, this.getJournalEntryDao());
	return result;
}

@Override
// Bulk-toggles the paid flag of all money transfers of a (person) proband,
// optionally restricted to one trial; only transfers whose flag differs are
// selected, and each change is journaled per trial and per proband.
protected Collection<MoneyTransferOutVO> handleSetAllMoneyTransfersPaid(
		AuthenticationVO auth, Long probandId, Long trialId, boolean paid) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	// lock the proband row for the duration of the bulk update
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao, LockMode.PESSIMISTIC_WRITE);
	if (!probandDao.toProbandOutVO(proband).isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
	}
	if (!proband.isPerson()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PROBAND_NOT_PERSON);
	}
	ServiceUtil.checkProbandLocked(proband);
	TrialDao trialDao = this.getTrialDao();
	Trial trial = null;
	if (trialId != null) {
		trial = CheckIDUtil.checkTrialId(trialId, trialDao);
	}
	JournalEntryDao journalEntryDao = this.getJournalEntryDao();
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
	// select only transfers whose current paid flag is the opposite of the target
	Collection<MoneyTransfer> moneyTransfers = moneyTransferDao.findByProbandTrialMethodCostTypePaidPerson(null, trial == null ? null : trial.getId(), null,
			proband.getId(), null, null, !paid, null, null);
	if (moneyTransfers.size() == 0) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PAID_NOT_CHANGED);
	}
	ArrayList<MoneyTransferOutVO> results = new ArrayList<MoneyTransferOutVO>(moneyTransfers.size());
	Iterator<MoneyTransfer> moneyTransfersIt = moneyTransfers.iterator();
	while (moneyTransfersIt.hasNext()) {
		MoneyTransfer moneyTransfer = moneyTransfersIt.next();
		MoneyTransferOutVO original = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
		if (!original.isDecrypted()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MONEY_TRANSFER);
		}
		Trial moneyTransferTrial = moneyTransfer.getTrial();
		if (moneyTransferTrial != null) {
			ServiceUtil.checkTrialLocked(moneyTransferTrial);
		}
		moneyTransfer.setPaid(paid);
		CoreUtil.modifyVersion(moneyTransfer, moneyTransfer, now, user);
		moneyTransferDao.update(moneyTransfer);
		MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
		if (moneyTransferTrial != null) {
			logSystemMessage(moneyTransferTrial, result.getProband(), now, user,
					paid ? SystemMessageCodes.MONEY_TRANSFER_PAID_SET : SystemMessageCodes.MONEY_TRANSFER_PAID_UNSET, result, original, journalEntryDao);
		}
		ServiceUtil.logSystemMessage(moneyTransfer.getProband(), original.getProband(), now, user,
				paid ? SystemMessageCodes.MONEY_TRANSFER_PAID_SET : SystemMessageCodes.MONEY_TRANSFER_PAID_UNSET, result, original, journalEntryDao);
		results.add(result);
	}
	return results;
}

// Creates/updates a batch of inquiry values for one proband within one trial.
// Per-value errors are collected; the first exception is rethrown with a map of
// inquiry id -> error message attached as data.
@Override
protected InquiryValuesOutVO handleSetInquiryValues(
		AuthenticationVO auth, Set<InquiryValueInVO> inquiryValuesIn, boolean force) throws Exception {
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	InquiryValuesOutVO result = new InquiryValuesOutVO();
	ServiceException firstException = null;
	HashMap<Long, String> errorMessagesMap = new HashMap<Long, String>();
	Proband proband = null;
	if (inquiryValuesIn != null && inquiryValuesIn.size() > 0) {
		Trial trial = null;
		ArrayList<InquiryValueOutVO> inquiryValues = new ArrayList<InquiryValueOutVO>(inquiryValuesIn.size());
		ArrayList<InquiryValueJsonVO> jsInquiryValues = null;
		if (Settings.getBoolean(SettingCodes.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION, Bundle.SETTINGS,
				DefaultSettings.INQUIRY_VALUES_ENABLE_BROWSER_FIELD_CALCULATION)) {
			jsInquiryValues = new ArrayList<InquiryValueJsonVO>(inquiryValuesIn.size());
		}
		Iterator<InquiryValueInVO> inquiryValuesInIt = inquiryValuesIn.iterator();
		while (inquiryValuesInIt.hasNext()) {
			InquiryValueInVO inquiryValueIn = inquiryValuesInIt.next();
			Inquiry inquiry = CheckIDUtil.checkInquiryId(inquiryValueIn.getInquiryId(), this.getInquiryDao());
			if (trial == null) {
				// first value determines the trial; all others must match it
				trial = inquiry.getTrial();
				ServiceUtil.checkTrialLocked(trial);
				if (!trial.getStatus().isInquiryValueInputEnabled()) {
					throw L10nUtil.initServiceException(ServiceExceptionCodes.INQUIRY_VALUE_INPUT_DISABLED_FOR_TRIAL,
							CommonUtil.trialOutVOToString(this.getTrialDao().toTrialOutVO(trial)));
				}
			} else if (!trial.equals(inquiry.getTrial())) {
				throw L10nUtil.initServiceException(ServiceExceptionCodes.INQUIRY_VALUES_FOR_DIFFERENT_TRIALS);
			}
			if (proband == null) {
				// first value determines the proband; lock its row for writing
				proband = CheckIDUtil.checkProbandId(inquiryValueIn.getProbandId(), this.getProbandDao(), LockMode.PESSIMISTIC_WRITE);
ServiceUtil.checkProbandLocked(proband);
			} else if (!proband.getId().equals(inquiryValueIn.getProbandId())) {
				throw L10nUtil.initServiceException(ServiceExceptionCodes.INQUIRY_VALUES_FOR_DIFFERENT_PROBANDS);
			}
			try {
				addUpdateInquiryValue(inquiryValueIn, proband, inquiry, now, user, force,
						Settings.getBoolean(SettingCodes.LOG_INQUIRY_VALUE_TRIAL, Bundle.SETTINGS, DefaultSettings.LOG_INQUIRY_VALUE_TRIAL),
						Settings.getBoolean(SettingCodes.LOG_INQUIRY_VALUE_PROBAND, Bundle.SETTINGS, DefaultSettings.LOG_INQUIRY_VALUE_PROBAND),
						inquiryValues, jsInquiryValues);
			} catch (ServiceException e) {
				// collect per-inquiry errors; keep processing remaining values
				if (firstException == null) {
					firstException = e;
				}
				errorMessagesMap.put(inquiry.getId(), e.getMessage());
			}
		}
		if (firstException != null) {
			firstException.setData(errorMessagesMap);
			throw firstException;
		}
		Collections.sort(inquiryValues, new InquiryValueOutVOComparator());
		result.setPageValues(inquiryValues);
		if (jsInquiryValues != null) {
			result.setJsValues(jsInquiryValues);
		}
	}
	return result;
}

// Toggles the paid flag of a single money transfer (optimistic version check);
// writes journal entries for the trial (if any) and the proband.
@Override
protected MoneyTransferOutVO handleSetMoneyTransferPaid(
		AuthenticationVO auth, Long moneyTransferId, Long version, boolean paid) throws Exception {
	MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
	MoneyTransfer moneyTransfer = CheckIDUtil.checkMoneyTransferId(moneyTransferId, moneyTransferDao);
	MoneyTransferOutVO original = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MONEY_TRANSFER);
	}
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = moneyTransfer.getProband();
	if (!proband.isPerson()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PROBAND_NOT_PERSON);
	}
	probandDao.lock(proband, LockMode.PESSIMISTIC_WRITE);
	if (!probandDao.toProbandOutVO(proband).isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
	}
	ServiceUtil.checkProbandLocked(proband);
	Trial trial = moneyTransfer.getTrial();
	if (trial != null) {
		ServiceUtil.checkTrialLocked(trial);
	}
	if (paid == original.getPaid()) { // unboxed comparison (boolean vs. Boolean), ok
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PAID_NOT_CHANGED);
	}
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(moneyTransfer, version.longValue(), now, user);
	moneyTransfer.setPaid(paid);
	moneyTransferDao.update(moneyTransfer);
	MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
	JournalEntryDao journalEntryDao = this.getJournalEntryDao();
	if (trial != null) {
		logSystemMessage(trial, result.getProband(), now, user,
				paid ? SystemMessageCodes.MONEY_TRANSFER_PAID_SET : SystemMessageCodes.MONEY_TRANSFER_PAID_UNSET, result, original, journalEntryDao);
	}
	ServiceUtil.logSystemMessage(proband, result.getProband(), now, user,
			paid ? SystemMessageCodes.MONEY_TRANSFER_PAID_SET : SystemMessageCodes.MONEY_TRANSFER_PAID_UNSET, result, original, journalEntryDao);
	return result;
}

// Marks the given address as the wire-transfer address of its proband, clearing
// the flag on any other address first (at most one per proband).
@Override
protected ProbandAddressOutVO handleSetProbandAddressWireTransfer(
		AuthenticationVO auth, Long probandAddressId, Long version) throws Exception {
	ProbandAddressDao addressDao = this.getProbandAddressDao();
	ProbandAddress address = CheckIDUtil.checkProbandAddressId(probandAddressId, addressDao);
	Proband proband = address.getProband();
	this.getProbandDao().lock(proband, LockMode.PESSIMISTIC_WRITE);
	ProbandAddressOutVO original = addressDao.toProbandAddressOutVO(address);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_ADDRESS);
	}
	ServiceUtil.checkProbandLocked(proband);
	if (address.isWireTransfer()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_ADDRESS_WIRE_TRANSFER_NOT_CHANGED);
	}
	JournalEntryDao journalEntryDao = this.getJournalEntryDao();
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	Iterator<ProbandAddress>
addressesIt = addressDao.findByProband(proband.getId(), null, null, true, null).iterator(); while (addressesIt.hasNext()) { ProbandAddress oldWireTransferAddress = addressesIt.next(); ProbandAddressOutVO oldWireTransferAddressOriginal = addressDao.toProbandAddressOutVO(address); oldWireTransferAddress.setWireTransfer(false); CoreUtil.modifyVersion(oldWireTransferAddress, oldWireTransferAddress, now, user); addressDao.update(oldWireTransferAddress); ProbandAddressOutVO oldWireTransferAddressResult = addressDao.toProbandAddressOutVO(address); ServiceUtil.logSystemMessage(oldWireTransferAddress.getProband(), oldWireTransferAddressOriginal.getProband(), now, user, SystemMessageCodes.PROBAND_ADDRESS_WIRE_TRANSFER_UNSET, oldWireTransferAddressResult, oldWireTransferAddressOriginal, journalEntryDao); } address.setWireTransfer(true); CoreUtil.modifyVersion(address, version.longValue(), now, user); addressDao.update(address); ProbandAddressOutVO result = addressDao.toProbandAddressOutVO(address); ServiceUtil.logSystemMessage(proband, result.getProband(), now, user, SystemMessageCodes.PROBAND_ADDRESS_WIRE_TRANSFER_SET, result, original, journalEntryDao); return result; } @Override protected ProbandImageOutVO handleSetProbandImage(AuthenticationVO auth, ProbandImageInVO probandImage) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband originalProband = CheckIDUtil.checkProbandId(probandImage.getId(), probandDao); ProbandImageOutVO original = probandDao.toProbandImageOutVO(originalProband); if (!original.isDecrypted()) { throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND); } ServiceUtil.checkProbandLocked(originalProband); checkProbandImageInput(probandImage); boolean hasImage = original.getHasImage(); boolean cleared = probandImage.getDatas() == null || probandImage.getDatas().length == 0; probandDao.evict(originalProband); Proband proband = probandDao.probandImageInVOToEntity(probandImage); Timestamp now = new 
Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalProband, proband, now, user);
	probandDao.update(proband);
	ProbandImageOutVO result = probandDao.toProbandImageOutVO(proband);
	ServiceUtil.logSystemMessage(proband, probandDao.toProbandOutVO(proband), now, user,
			cleared ? SystemMessageCodes.PROBAND_IMAGE_CLEARED : hasImage ? SystemMessageCodes.PROBAND_IMAGE_UPDATED : SystemMessageCodes.PROBAND_IMAGE_CREATED,
			result, original, this.getJournalEntryDao());
	return result;
}

// Updates a bank account; the owning proband must stay unchanged.
@Override
protected BankAccountOutVO handleUpdateBankAccount(
		AuthenticationVO auth, BankAccountInVO modifiedBankAccount) throws Exception {
	BankAccountDao bankAccountDao = this.getBankAccountDao();
	BankAccount originalBankAccount = CheckIDUtil.checkBankAccountId(modifiedBankAccount.getId(), bankAccountDao);
	BankAccountOutVO original = bankAccountDao.toBankAccountOutVO(originalBankAccount);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_BANK_ACCOUNT);
	}
	checkBankAccountInput(modifiedBankAccount);
	if (!modifiedBankAccount.getProbandId().equals(originalBankAccount.getProband().getId())) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.BANK_ACCOUNT_PROBAND_CHANGED);
	}
	// evict the loaded entity before converting the in-VO, then persist the copy
	bankAccountDao.evict(originalBankAccount);
	BankAccount bankAccount = bankAccountDao.bankAccountInVOToEntity(modifiedBankAccount);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalBankAccount, bankAccount, now, user);
	bankAccountDao.update(bankAccount);
	BankAccountOutVO result = bankAccountDao.toBankAccountOutVO(bankAccount);
	ServiceUtil.logSystemMessage(bankAccount.getProband(), result.getProband(), now, user, SystemMessageCodes.BANK_ACCOUNT_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Updates a diagnosis record.
@Override
protected DiagnosisOutVO handleUpdateDiagnosis(AuthenticationVO auth, DiagnosisInVO modifiedDiagnosis) throws Exception {
	DiagnosisDao diagnosisDao = this.getDiagnosisDao();
	Diagnosis originalDiagnosis = CheckIDUtil.checkDiagnosisId(modifiedDiagnosis.getId(), diagnosisDao);
	DiagnosisOutVO original = diagnosisDao.toDiagnosisOutVO(originalDiagnosis);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_DIAGNOSIS);
	}
	checkDiagnosisInput(modifiedDiagnosis);
	diagnosisDao.evict(originalDiagnosis);
	Diagnosis diagnosis = diagnosisDao.diagnosisInVOToEntity(modifiedDiagnosis);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalDiagnosis, diagnosis, now, user);
	diagnosisDao.update(diagnosis);
	DiagnosisOutVO result = diagnosisDao.toDiagnosisOutVO(diagnosis);
	ServiceUtil.logSystemMessage(diagnosis.getProband(), result.getProband(), now, user, SystemMessageCodes.DIAGNOSIS_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Updates a medication record; the owning proband must stay unchanged.
@Override
protected MedicationOutVO handleUpdateMedication(AuthenticationVO auth, MedicationInVO modifiedMedication) throws Exception {
	MedicationDao medicationDao = this.getMedicationDao();
	Medication originalMedication = CheckIDUtil.checkMedicationId(modifiedMedication.getId(), medicationDao);
	MedicationOutVO original = medicationDao.toMedicationOutVO(originalMedication);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MEDICATION);
	}
	checkMedicationInput(modifiedMedication);
	if (!modifiedMedication.getProbandId().equals(originalMedication.getProband().getId())) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MEDICATION_PROBAND_CHANGED);
	}
	medicationDao.evict(originalMedication);
	Medication medication = medicationDao.medicationInVOToEntity(modifiedMedication);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalMedication, medication, now, user);
	medicationDao.update(medication);
	MedicationOutVO result = medicationDao.toMedicationOutVO(medication);
	ServiceUtil.logSystemMessage(medication.getProband(), result.getProband(), now, user, SystemMessageCodes.MEDICATION_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Updates a money transfer; the owning proband must stay unchanged. Journal
// entries go to both the trial (if any) and the proband.
@Override
protected MoneyTransferOutVO handleUpdateMoneyTransfer(
		AuthenticationVO auth, MoneyTransferInVO modifiedMoneyTransfer, Long maxAllowedCostTypes) throws Exception {
	MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao();
	MoneyTransfer originalMoneyTransfer = CheckIDUtil.checkMoneyTransferId(modifiedMoneyTransfer.getId(), moneyTransferDao);
	MoneyTransferOutVO original = moneyTransferDao.toMoneyTransferOutVO(originalMoneyTransfer);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_MONEY_TRANSFER);
	}
	checkMoneyTransferInput(modifiedMoneyTransfer, maxAllowedCostTypes);
	if (!modifiedMoneyTransfer.getProbandId().equals(originalMoneyTransfer.getProband().getId())) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.MONEY_TRANSFER_PROBAND_CHANGED);
	}
	moneyTransferDao.evict(originalMoneyTransfer);
	MoneyTransfer moneyTransfer = moneyTransferDao.moneyTransferInVOToEntity(modifiedMoneyTransfer);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalMoneyTransfer, moneyTransfer, now, user);
	moneyTransferDao.update(moneyTransfer);
	Trial trial = moneyTransfer.getTrial();
	MoneyTransferOutVO result = moneyTransferDao.toMoneyTransferOutVO(moneyTransfer);
	if (trial != null) {
		logSystemMessage(trial, result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_UPDATED, result, original, this.getJournalEntryDao());
	}
	ServiceUtil.logSystemMessage(moneyTransfer.getProband(), result.getProband(), now, user, SystemMessageCodes.MONEY_TRANSFER_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Transitions the privacy consent status of a proband; the new status must be a
// valid transition from the current one.
@Override
protected ProbandOutVO handleUpdatePrivacyConsentStatus(
		AuthenticationVO auth, Long
probandId, Long version, Long privacyConsentStatusTypeId) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
	ProbandOutVO original = probandDao.toProbandOutVO(proband);
	PrivacyConsentStatusTypeDao privacyConsentStatusTypeDao = this.getPrivacyConsentStatusTypeDao();
	PrivacyConsentStatusType state = CheckIDUtil.checkPrivacyConsentStatusTypeId(privacyConsentStatusTypeId, privacyConsentStatusTypeDao);
	ServiceUtil.checkProbandLocked(proband);
	// the requested status must be reachable from the current status
	boolean validState = false;
	Iterator<PrivacyConsentStatusType> statesIt = privacyConsentStatusTypeDao.findTransitions(proband.getPrivacyConsentStatus().getId()).iterator();
	while (statesIt.hasNext()) {
		if (state.equals(statesIt.next())) {
			validState = true;
			break;
		}
	}
	if (!validState) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.INVALID_NEW_PRIVACY_CONSENT_STATUS_TYPE,
				L10nUtil.getPrivacyConsentStatusTypeName(Locales.USER, state.getNameL10nKey()));
	}
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(proband, version.longValue(), now, user);
	proband.setPrivacyConsentStatus(state);
	probandDao.update(proband);
	ServiceUtil.notifyExpiringProbandAutoDelete(proband, now, this.getNotificationDao());
	ProbandOutVO result = probandDao.toProbandOutVO(proband);
	ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PRIVACY_CONSENT_STATUS_TYPE_UPDATED, result, original, this.getJournalEntryDao());
	// NOTE(review): converts the proband to a VO a second time although 'result'
	// above holds the same data — consider returning 'result'.
	return probandDao.toProbandOutVO(proband);
}

// Updates a proband's master data; enforces blinded-proband department rules,
// activates privacy consent control when the new category enables it, and
// journals the update to the proband, its physician and its parents.
@Override
protected ProbandOutVO handleUpdateProband(AuthenticationVO auth, ProbandInVO modifiedProband, Integer maxInstances, Integer maxParentsDepth, Integer maxChildrenDepth)
		throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	User user = CoreUtil.getUser();
	this.getUserDao().lock(user, LockMode.PESSIMISTIC_WRITE);
	Proband originalProband = CheckIDUtil.checkProbandId(modifiedProband.getId(), probandDao, LockMode.PESSIMISTIC_WRITE);
	ProbandOutVO original = probandDao.toProbandOutVO(originalProband, maxInstances, maxParentsDepth, maxChildrenDepth);
	if (modifiedProband.getBlinded()) {
		// blinded probands: the user must belong to the proband's department and
		// the department must not change
		if (!user.getDepartment().getId().equals(modifiedProband.getDepartmentId())) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_NOT_EQUAL_TO_USER_DEPARTMENT);
		}
		if (!modifiedProband.getDepartmentId().equals(originalProband.getDepartment().getId())) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_CHANGED);
		}
	} else {
		if (!original.isDecrypted()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
		}
	}
	checkProbandInput(modifiedProband);
	if (originalProband.isPerson() != modifiedProband.isPerson()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_PERSON_FLAG_CHANGED);
	}
	boolean originalPrivacyConsentControl = originalProband.getCategory().isPrivacyConsentControl();
	probandDao.evict(originalProband);
	Proband proband = probandDao.probandInVOToEntity(modifiedProband);
	checkProbandLoop(proband);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	CoreUtil.modifyVersion(originalProband, proband, now, user);
	if (!originalPrivacyConsentControl && proband.getCategory().isPrivacyConsentControl()) {
		// category newly enables privacy consent control: restart the auto-delete
		// deadline and reset the consent status to an initial state
		ServiceUtil.resetAutoDeleteDeadline(proband, now);
		proband.setPrivacyConsentStatus(this.getPrivacyConsentStatusTypeDao().findInitialStates().iterator().next());
	}
	probandDao.update(proband);
	ServiceUtil.notifyExpiringProbandAutoDelete(proband, now, this.getNotificationDao());
	ProbandOutVO result = probandDao.toProbandOutVO(proband, maxInstances, maxParentsDepth, maxChildrenDepth);
	JournalEntryDao journalEntryDao = this.getJournalEntryDao();
	ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_UPDATED, result, original, journalEntryDao);
	Staff physician = proband.getPhysician();
	if (physician != null) {
		ServiceUtil.logSystemMessage(physician, result, now, user, SystemMessageCodes.PROBAND_UPDATED, result, original, journalEntryDao);
	}
	// also journal the update with each parent proband
	Iterator<ProbandOutVO> parentsIt = original.getParents().iterator();
	while (parentsIt.hasNext()) {
		ProbandOutVO parent = parentsIt.next();
		ServiceUtil.logSystemMessage(probandDao.load(parent.getId()), result, now, user, SystemMessageCodes.PROBAND_UPDATED, result, original, journalEntryDao);
	}
	return result;
}

// Updates a proband address.
@Override
protected ProbandAddressOutVO handleUpdateProbandAddress(
		AuthenticationVO auth, ProbandAddressInVO modifiedProbandAddress) throws Exception {
	ProbandAddressDao addressDao = this.getProbandAddressDao();
	ProbandAddress originalAddress = CheckIDUtil.checkProbandAddressId(modifiedProbandAddress.getId(), addressDao);
	ProbandAddressOutVO original = addressDao.toProbandAddressOutVO(originalAddress);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_ADDRESS);
	}
	checkProbandAddressInput(modifiedProbandAddress);
	addressDao.evict(originalAddress);
	ProbandAddress address = addressDao.probandAddressInVOToEntity(modifiedProbandAddress);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalAddress, address, now, user);
	addressDao.update(address);
	ProbandAddressOutVO result = addressDao.toProbandAddressOutVO(address);
	ServiceUtil.logSystemMessage(address.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_ADDRESS_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Changes the proband category and stores the comment (encrypted for persons).
@Override
protected ProbandOutVO handleUpdateProbandCategory(
		AuthenticationVO auth, Long probandId, Long version, Long categoryId, String comment) throws Exception {
	ProbandDao probandDao = this.getProbandDao();
	Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao);
	ProbandOutVO original = probandDao.toProbandOutVO(proband);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND);
	}
ProbandCategory category = CheckIDUtil.checkProbandCategoryId(categoryId, this.getProbandCategoryDao());
	// the new category must match the person/animal kind of the proband
	if (proband.isPerson()) {
		if (!category.isPerson()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CATEGORY_NOT_FOR_PERSON_ENTRIES,
					L10nUtil.getProbandCategoryName(Locales.USER, category.getNameL10nKey()));
		}
	} else {
		if (!category.isAnimal()) {
			throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_CATEGORY_NOT_FOR_ANIMAL_ENTRIES,
					L10nUtil.getProbandCategoryName(Locales.USER, category.getNameL10nKey()));
		}
	}
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(proband, version.longValue(), now, user);
	proband.setCategory(category);
	if (proband.isPerson()) {
		// person comments are stored encrypted plus a hash used for searching
		ProbandContactParticulars personParticulars = proband.getPersonParticulars();
		if (personParticulars != null) {
			CipherText cipherText = CryptoUtil.encryptValue(comment);
			personParticulars.setCommentIv(cipherText.getIv());
			personParticulars.setEncryptedComment(cipherText.getCipherText());
			personParticulars.setCommentHash(CryptoUtil.hashForSearch(comment));
		}
	} else {
		AnimalContactParticulars animalParticulars = proband.getAnimalParticulars();
		if (animalParticulars != null) {
			animalParticulars.setComment(comment);
		}
	}
	probandDao.update(proband);
	ProbandOutVO result = probandDao.toProbandOutVO(proband);
	ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_CATEGORY_UPDATED, result, original, this.getJournalEntryDao());
	// NOTE(review): converts the proband to a VO a second time although 'result'
	// holds the same data — consider returning 'result'.
	return probandDao.toProbandOutVO(proband);
}

// Updates a proband contact detail value.
@Override
protected ProbandContactDetailValueOutVO handleUpdateProbandContactDetailValue(
		AuthenticationVO auth, ProbandContactDetailValueInVO modifiedProbandContactDetailValue) throws Exception {
	ProbandContactDetailValueDao contactValueDao = this.getProbandContactDetailValueDao();
	ProbandContactDetailValue originalContactValue = CheckIDUtil.checkProbandContactDetailValueId(modifiedProbandContactDetailValue.getId(), contactValueDao);
	ProbandContactDetailValueOutVO original = contactValueDao.toProbandContactDetailValueOutVO(originalContactValue);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_CONTACT_DETAIL_VALUE);
	}
	checkProbandContactDetailValueInput(modifiedProbandContactDetailValue);
	contactValueDao.evict(originalContactValue);
	ProbandContactDetailValue contactValue = contactValueDao.probandContactDetailValueInVOToEntity(modifiedProbandContactDetailValue);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalContactValue, contactValue, now, user);
	contactValueDao.update(contactValue);
	ProbandContactDetailValueOutVO result = contactValueDao.toProbandContactDetailValueOutVO(contactValue);
	ServiceUtil.logSystemMessage(contactValue.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_CONTACT_DETAIL_VALUE_UPDATED, result,
			original, this.getJournalEntryDao());
	return result;
}

// Updates a proband status entry and triggers the proband-inactive notification.
@Override
protected ProbandStatusEntryOutVO handleUpdateProbandStatusEntry(
		AuthenticationVO auth, ProbandStatusEntryInVO modifiedProbandStatusEntry) throws Exception {
	ProbandStatusEntryDao statusEntryDao = this.getProbandStatusEntryDao();
	ProbandStatusEntry originalStatusEntry = CheckIDUtil.checkProbandStatusEntryId(modifiedProbandStatusEntry.getId(), statusEntryDao);
	ProbandStatusEntryOutVO original = statusEntryDao.toProbandStatusEntryOutVO(originalStatusEntry);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_STATUS_ENTRY);
	}
	checkProbandStatusEntryInput(modifiedProbandStatusEntry);
	statusEntryDao.evict(originalStatusEntry);
	ProbandStatusEntry statusEntry = statusEntryDao.probandStatusEntryInVOToEntity(modifiedProbandStatusEntry);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalStatusEntry, statusEntry, now, user);
	statusEntryDao.update(statusEntry);
	notifyProbandInactive(statusEntry, now);
	ProbandStatusEntryOutVO result = statusEntryDao.toProbandStatusEntryOutVO(statusEntry);
	ServiceUtil.logSystemMessage(statusEntry.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_STATUS_ENTRY_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Updates a proband tag value.
@Override
protected ProbandTagValueOutVO handleUpdateProbandTagValue(
		AuthenticationVO auth, ProbandTagValueInVO modifiedProbandTagValue) throws Exception {
	ProbandTagValueDao tagValueDao = this.getProbandTagValueDao();
	ProbandTagValue originalTagValue = CheckIDUtil.checkProbandTagValueId(modifiedProbandTagValue.getId(), tagValueDao);
	ProbandTagValueOutVO original = tagValueDao.toProbandTagValueOutVO(originalTagValue);
	if (!original.isDecrypted()) {
		throw L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROBAND_TAG_VALUE);
	}
	checkProbandTagValueInput(modifiedProbandTagValue);
	tagValueDao.evict(originalTagValue);
	ProbandTagValue tagValue = tagValueDao.probandTagValueInVOToEntity(modifiedProbandTagValue);
	Timestamp now = new Timestamp(System.currentTimeMillis());
	User user = CoreUtil.getUser();
	CoreUtil.modifyVersion(originalTagValue, tagValue, now, user);
	tagValueDao.update(tagValue);
	ProbandTagValueOutVO result = tagValueDao.toProbandTagValueOutVO(tagValue);
	ServiceUtil.logSystemMessage(tagValue.getProband(), result.getProband(), now, user, SystemMessageCodes.PROBAND_TAG_VALUE_UPDATED, result, original,
			this.getJournalEntryDao());
	return result;
}

// Updates a procedure record.
@Override
protected ProcedureOutVO handleUpdateProcedure(AuthenticationVO auth, ProcedureInVO modifiedProcedure) throws Exception {
	ProcedureDao procedureDao = this.getProcedureDao();
	Procedure originalProcedure = CheckIDUtil.checkProcedureId(modifiedProcedure.getId(), procedureDao);
	ProcedureOutVO original = procedureDao.toProcedureOutVO(originalProcedure);
	if (!original.isDecrypted()) {
		throw
L10nUtil.initServiceException(ServiceExceptionCodes.CANNOT_DECRYPT_PROCEDURE); } checkProcedureInput(modifiedProcedure); procedureDao.evict(originalProcedure); Procedure procedure = procedureDao.procedureInVOToEntity(modifiedProcedure); Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); CoreUtil.modifyVersion(originalProcedure, procedure, now, user); procedureDao.update(procedure); ProcedureOutVO result = procedureDao.toProcedureOutVO(procedure); ServiceUtil.logSystemMessage(procedure.getProband(), result.getProband(), now, user, SystemMessageCodes.PROCEDURE_UPDATED, result, original, this.getJournalEntryDao()); return result; } private void notifyProbandInactive(ProbandStatusEntry statusEntry, Date now) throws Exception { NotificationDao notificationDao = this.getNotificationDao(); ServiceUtil.cancelNotifications(statusEntry.getNotifications(), notificationDao, null); // clears inventory_active AND inventory inactive booking notifications if (!statusEntry.getType().isProbandActive()) { if ((new DateInterval(statusEntry.getStart(), statusEntry.getStop())).contains(now)) { notificationDao.addNotification(statusEntry, now, null); } if (!(new DateInterval(statusEntry.getStart(), statusEntry.getStop())).isOver(now)) { VisitScheduleItemDao visitScheduleItemDao = this.getVisitScheduleItemDao(); Proband proband = statusEntry.getProband(); Iterator<ProbandListEntry> trialParticipationsIt = proband.getTrialParticipations().iterator(); while (trialParticipationsIt.hasNext()) { ProbandListEntry probandListEntry = trialParticipationsIt.next(); ProbandGroup probandGroup = probandListEntry.getGroup(); if (probandGroup != null) { Iterator<VisitScheduleItem> it = visitScheduleItemDao.findByInterval(probandListEntry.getTrial().getId(), probandGroup.getId(), statusEntry.getStart(), statusEntry.getStop()).iterator(); while (it.hasNext()) { notificationDao.addNotification(it.next(), proband, statusEntry, now, null); } } } } } } @Override 
protected ProbandOutVO handleUpdateProbandDepartment(AuthenticationVO auth, Long probandId, Long newDepartmentId, String plainNewDepartmentPassword, String plainOldDepartmentPassword) throws Exception { ProbandDao probandDao = this.getProbandDao(); Proband proband = CheckIDUtil.checkProbandId(probandId, probandDao, LockMode.PESSIMISTIC_WRITE); Department newDepartment = CheckIDUtil.checkDepartmentId(newDepartmentId, this.getDepartmentDao()); if (plainNewDepartmentPassword == null) { plainNewDepartmentPassword = CoreUtil.getUserContext().getPlainDepartmentPassword(); } if (plainOldDepartmentPassword == null) { plainOldDepartmentPassword = CoreUtil.getUserContext().getPlainDepartmentPassword(); } Department oldDepartment = proband.getDepartment(); if (!oldDepartment.equals(newDepartment)) { if (!CryptoUtil.checkDepartmentPassword(newDepartment, plainNewDepartmentPassword)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.DEPARTMENT_PASSWORD_WRONG); } if (!CryptoUtil.checkDepartmentPassword(oldDepartment, plainOldDepartmentPassword)) { throw L10nUtil.initServiceException(ServiceExceptionCodes.OLD_DEPARTMENT_PASSWORD_WRONG); } Timestamp now = new Timestamp(System.currentTimeMillis()); User user = CoreUtil.getUser(); SecretKey newDepartmentKey = ReEncrypter.getDepartmenKey(newDepartment, plainNewDepartmentPassword); SecretKey oldDepartmentKey = ReEncrypter.getDepartmenKey(oldDepartment, plainOldDepartmentPassword); ProbandOutVO original = probandDao.toProbandOutVO(proband); probandDao.reEncrypt(proband, oldDepartmentKey, newDepartmentKey); proband.setDepartment(newDepartment); CoreUtil.modifyVersion(proband, proband.getVersion(), now, user); probandDao.update(proband); ProbandOutVO result = probandDao.toProbandOutVO(proband); ProbandTagValueDao probandTagValueDao = this.getProbandTagValueDao(); Iterator<ProbandTagValue> tagValuesIt = proband.getTagValues().iterator(); while (tagValuesIt.hasNext()) { ProbandTagValue tagValue = tagValuesIt.next(); 
probandTagValueDao.reEncrypt(tagValue, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(tagValue, tagValue.getVersion(), now, user); probandTagValueDao.update(tagValue); } ProbandContactDetailValueDao probandContactDetailValueDao = this.getProbandContactDetailValueDao(); Iterator<ProbandContactDetailValue> contactDetailValuesIt = proband.getContactDetailValues().iterator(); while (contactDetailValuesIt.hasNext()) { ProbandContactDetailValue contactDetailValue = contactDetailValuesIt.next(); probandContactDetailValueDao.reEncrypt(contactDetailValue, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(contactDetailValue, contactDetailValue.getVersion(), now, user); probandContactDetailValueDao.update(contactDetailValue); } ProbandAddressDao probandAddressDao = this.getProbandAddressDao(); Iterator<ProbandAddress> addressesIt = proband.getAddresses().iterator(); while (addressesIt.hasNext()) { ProbandAddress address = addressesIt.next(); probandAddressDao.reEncrypt(address, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(address, address.getVersion(), now, user); probandAddressDao.update(address); } ProbandStatusEntryDao probandStatusEntryDao = this.getProbandStatusEntryDao(); Iterator<ProbandStatusEntry> statusEntriesIt = proband.getStatusEntries().iterator(); while (statusEntriesIt.hasNext()) { ProbandStatusEntry statusEntry = statusEntriesIt.next(); probandStatusEntryDao.reEncrypt(statusEntry, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(statusEntry, statusEntry.getVersion(), now, user); probandStatusEntryDao.update(statusEntry); } MedicationDao medicationDao = this.getMedicationDao(); Iterator<Medication> medicationsIt = proband.getMedications().iterator(); while (medicationsIt.hasNext()) { Medication medication = medicationsIt.next(); medicationDao.reEncrypt(medication, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(medication, medication.getVersion(), now, user); medicationDao.update(medication); } 
DiagnosisDao diagnosisDao = this.getDiagnosisDao(); Iterator<Diagnosis> diagnosesIt = proband.getDiagnoses().iterator(); while (diagnosesIt.hasNext()) { Diagnosis diagnosis = diagnosesIt.next(); diagnosisDao.reEncrypt(diagnosis, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(diagnosis, diagnosis.getVersion(), now, user); diagnosisDao.update(diagnosis); } ProcedureDao procedureDao = this.getProcedureDao(); Iterator<Procedure> proceduresIt = proband.getProcedures().iterator(); while (proceduresIt.hasNext()) { Procedure procedure = proceduresIt.next(); procedureDao.reEncrypt(procedure, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(procedure, procedure.getVersion(), now, user); procedureDao.update(procedure); } MoneyTransferDao moneyTransferDao = this.getMoneyTransferDao(); Iterator<MoneyTransfer> moneyTransfersIt = proband.getMoneyTransfers().iterator(); while (moneyTransfersIt.hasNext()) { MoneyTransfer moneyTransfer = moneyTransfersIt.next(); moneyTransferDao.reEncrypt(moneyTransfer, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(moneyTransfer, moneyTransfer.getVersion(), now, user); moneyTransferDao.update(moneyTransfer); } BankAccountDao bankAccountDao = this.getBankAccountDao(); Iterator<BankAccount> bankAccountIt = proband.getBankAccounts().iterator(); while (bankAccountIt.hasNext()) { BankAccount bankAccount = bankAccountIt.next(); bankAccountDao.reEncrypt(bankAccount, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(bankAccount, bankAccount.getVersion(), now, user); bankAccountDao.update(bankAccount); } //no re-encryption for proband list status entries, as those are encryted by the creating user // Iterator<ProbandListEntry> trialParticipationsIt = proband.getTrialParticipations().iterator(); // while (trialParticipationsIt.hasNext()) { // Iterator<ProbandListStatusEntry> probandListStatusEntriesIt = trialParticipationsIt.next().getStatusEntries().iterator(); // while (probandListStatusEntriesIt.hasNext()) { 
// ProbandListStatusEntry probandListStatusEntry = probandListStatusEntriesIt.next(); // probandListStatusEntryDao.reEncrypt(probandListStatusEntry, oldDepartmentKey, newDepartmentKey); // CoreUtil.modifyVersion(probandListStatusEntry,probandListStatusEntry.getVersion(), now, user); // probandListStatusEntryDao.update(probandListStatusEntry); // } // } MassMailRecipientDao massMailRecipientDao = this.getMassMailRecipientDao(); Iterator<MassMailRecipient> massMailReceiptsIt = proband.getMassMailReceipts().iterator(); while (massMailReceiptsIt.hasNext()) { MassMailRecipient recipient = massMailReceiptsIt.next(); massMailRecipientDao.reEncrypt(recipient, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(recipient, recipient.getVersion(), now, user); massMailRecipientDao.update(recipient); } //no re-encryption for journal entries, as those are encryted by the creating user //JournalEntryDao journalEntryDao = this.getJournalEntryDao(); //Iterator<JournalEntry> journalEntriesIt = proband.getJournalEntries().iterator(); //while (journalEntriesIt.hasNext()) { // JournalEntry journalEntry = journalEntriesIt.next(); // journalEntryDao.reEncrypt(journalEntry, oldDepartmentKey, newDepartmentKey); // CoreUtil.modifyVersion(journalEntry, journalEntry.getVersion(), now, user); // journalEntryDao.update(journalEntry); //} FileDao fileDao = this.getFileDao(); Iterator<File> filesIt = proband.getFiles().iterator(); while (filesIt.hasNext()) { File file = filesIt.next(); fileDao.reEncrypt(file, oldDepartmentKey, newDepartmentKey); CoreUtil.modifyVersion(file, file.getVersion(), now, user); fileDao.update(file); } ServiceUtil.logSystemMessage(proband, result, now, user, SystemMessageCodes.PROBAND_DEPARTMENT_UPDATED, result, original, this.getJournalEntryDao()); return result; } else { throw L10nUtil.initServiceException(ServiceExceptionCodes.PROBAND_DEPARTMENT_NOT_CHANGED); } } }
dynamic visit schedule - proband service - proband and staff status collisions
core/src/main/java/org/phoenixctms/ctsms/service/proband/ProbandServiceImpl.java
dynamic visit schedule - proband service
<ide><path>ore/src/main/java/org/phoenixctms/ctsms/service/proband/ProbandServiceImpl.java <ide> Collection<VisitScheduleItem> visitScheduleItems; <ide> switch (style) { <ide> case PROBAND_VISIT_SCHEDULE: <del> visitScheduleItems = visitScheduleItemDao.findByTrialGroupVisitProbandTravel(null, null, null, probandVO.getId(), null, null); <add> visitScheduleItems = visitScheduleItemDao.findByTrialGroupVisitProbandTravel(null, null, null, probandVO.getId(), null, true, null); <ide> break; <ide> case PROBAND_TRIAL_VISIT_SCHEDULE: <del> visitScheduleItems = visitScheduleItemDao.findByTrialGroupVisitProbandTravel(trialVO.getId(), null, null, probandVO.getId(), null, null); <add> visitScheduleItems = visitScheduleItemDao.findByTrialGroupVisitProbandTravel(trialVO.getId(), null, null, probandVO.getId(), null, true, null); <ide> break; <ide> default: <ide> visitScheduleItems = null; <ide> } <del> VisitScheduleExcelVO result = ServiceUtil.creatVisitScheduleExcel(visitScheduleItems, style, probandVO, trialVO, <add> VisitScheduleExcelVO result = ServiceUtil.createVisitScheduleExcel(visitScheduleItems, style, probandVO, trialVO, <ide> visitScheduleItemDao, <ide> this.getProbandListStatusEntryDao(), <ide> this.getProbandAddressDao(), <ide> ProbandGroup probandGroup = probandListEntry.getGroup(); <ide> if (probandGroup != null) { <ide> collidingVisitScheduleItems <del> .addAll(visitScheduleItemDao.findByInterval(probandListEntry.getTrial().getId(), probandGroup.getId(), probandStatusEntry.getStart(), <add> .addAll(visitScheduleItemDao.findByInterval(probandListEntry.getTrial().getId(), probandGroup.getId(), probandListEntry.getProband().getId(), <add> probandStatusEntry.getStart(), <ide> probandStatusEntry.getStop())); <ide> } else { <ide> if (allProbandGroups) { <del> collidingVisitScheduleItems.addAll(visitScheduleItemDao.findByInterval(probandListEntry.getTrial().getId(), null, probandStatusEntry.getStart(), <del> probandStatusEntry.getStop())); <add> 
collidingVisitScheduleItems.addAll( <add> visitScheduleItemDao.findByInterval(probandListEntry.getTrial().getId(), null, probandListEntry.getProband().getId(), probandStatusEntry.getStart(), <add> probandStatusEntry.getStop())); <ide> } <ide> } <ide> } <ide> ProbandListEntry probandListEntry = trialParticipationsIt.next(); <ide> ProbandGroup probandGroup = probandListEntry.getGroup(); <ide> if (probandGroup != null) { <del> Iterator<VisitScheduleItem> it = visitScheduleItemDao.findByInterval(probandListEntry.getTrial().getId(), probandGroup.getId(), statusEntry.getStart(), <del> statusEntry.getStop()).iterator(); <add> Iterator<VisitScheduleItem> it = visitScheduleItemDao <add> .findByInterval(probandListEntry.getTrial().getId(), probandGroup.getId(), proband.getId(), statusEntry.getStart(), <add> statusEntry.getStop()) <add> .iterator(); <ide> while (it.hasNext()) { <ide> notificationDao.addNotification(it.next(), proband, statusEntry, now, null); <ide> }
Java
mit
c66aec8c1a8bf4758d07b1729da3526a326d4430
0
ferstl/parallel-stream-support
package com.github.ferstl.streams; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.Optional; import java.util.Spliterator; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.ForkJoinTask; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.BinaryOperator; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.IntFunction; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.ToDoubleFunction; import java.util.function.ToIntFunction; import java.util.function.ToLongFunction; import java.util.function.UnaryOperator; import java.util.stream.Collector; import java.util.stream.DoubleStream; import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; import java.util.stream.StreamSupport; import static java.util.Arrays.stream; import static java.util.Objects.requireNonNull; import static java.util.stream.StreamSupport.stream; /** * <p> * An implementation of {@link Stream} which uses a custom {@link ForkJoinPool} for parallel aggregations. The * following example illustrates an aggregate operation using {@link ParallelStreamSupport} with a custom * {@link ForkJoinPool}: * * <pre> * * ForkJoinPool pool = new ForkJoinPool(); * int sum = ParallelStreamSupport.parallelStream(widgets, pool) * .filter(w -> w.getColor() == RED) * .mapToInt(w -> w.getWeight()) * .sum(); * </pre> * </p> * <p> * In case this stream is configured for parallel execution, i.e. {@link #isParallel()} returns {@code true}, a * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal * operation</a> will be executed as {@link ForkJoinTask} in the custom {@link ForkJoinPool}. Otherwise it will be * executed in the calling thread. 
* </p> * <p> * This implementation offers various factory methods which are based on: * <ul> * <li>The static factory methods of {@link Stream}, which are meaningful for parallel streams</li> * <li>{@link Collection#parallelStream()}</li> * <li>{@link Arrays#stream(Object[])}</li> * <li>{@link StreamSupport#stream(Spliterator, boolean)}</li> * <li>{@link StreamSupport#stream(Supplier, int, boolean))}</li> * </ul> * </p> * * @apiNote Internally, this stream wraps a stream which is initially created in one of the static factory methods. * Whenever a non-terminal operation is called the underlying stream will be replaced with the result of calling the * same method on that stream. Although each factory method returns a parallel stream, calling {@link #sequential()} is * still possible and leads to sequential execution of a terminal operation within the calling thread. */ public class ParallelStreamSupport<T> extends AbstractParallelStreamSupport<T, Stream<T>> implements Stream<T> { /** * Constructor for internal use within this package only. * * @param delegate Stream to delegate each operation. * @param workerPool Worker pool for executing terminaloperations in parallel. Must not be null. */ ParallelStreamSupport(Stream<T> delegate, ForkJoinPool workerPool) { super(delegate, workerPool); } /** * Creates a <strong>parallel</strong> stream from the given Collection. This operation is similar to * {@link Collection#parallelStream()} with the difference that parallel * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal * operations</a> will be executed in the given {@link ForkJoinPool}. * * @param <T> The type of stream elements. * @param collection Collection to create the parallel stream from. Must not be null. * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. 
* @see Collection#parallelStream() */ public static <T> Stream<T> parallelStream(Collection<T> collection, ForkJoinPool workerPool) { requireNonNull(collection, "Collection must not be null"); return new ParallelStreamSupport<>(collection.parallelStream(), workerPool); } /** * Creates a <strong>parallel</strong> stream from the given Array. This operation is similar to * {@link Arrays#stream(Object[])} with the difference that parallel * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal * operations</a> will be executed in the given {@link ForkJoinPool}. * * @param <T> The type of stream elements. * @param array Array to create the parallel stream from. Must not be null. * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. * @see Arrays#stream(Object[]) */ public static <T> Stream<T> parallelStream(T[] array, ForkJoinPool workerPool) { requireNonNull(array, "Array must not be null"); return new ParallelStreamSupport<>(stream(array).parallel(), workerPool); } /** * Creates a <strong>parallel</strong> stream from the given Spliterator. This operation is similar to * {@link StreamSupport#stream(Spliterator, boolean)} with the difference that parallel * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal * operations</a> will be executed in the given {@link ForkJoinPool}. * * @param <T> The type of stream elements. * @param spliterator A {@code Spliterator} describing the stream elements. Must not be null. * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. 
* @see StreamSupport#stream(Spliterator, boolean) */ public static <T> Stream<T> parallelStream(Spliterator<T> spliterator, ForkJoinPool workerPool) { requireNonNull(spliterator, "Spliterator must not be null"); return new ParallelStreamSupport<>(stream(spliterator, true), workerPool); } /** * Creates a <strong>parallel</strong> stream from the given Spliterator supplier. This operation is similar to * {@link StreamSupport#longStream(Supplier, int, boolean))} with the difference that parallel * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal * operations</a> will be executed in the given {@link ForkJoinPool}. * * @param <T> The type of stream elements. * @param supplier A {@code Supplier} of a {@code Spliterator}. Must not be null. * @param characteristics Spliterator characteristics of the supplied {@code Spliterator}. The characteristics must * be equal to {@code supplier.get().characteristics()}, otherwise undefined behavior may occur when terminal * operation commences. * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. * @see StreamSupport#stream(Supplier, int, boolean) */ public static <T> Stream<T> parallelStream(Supplier<? extends Spliterator<T>> supplier, int characteristics, ForkJoinPool workerPool) { requireNonNull(supplier, "Supplier must not be null"); return new ParallelStreamSupport<>(stream(supplier, characteristics, true), workerPool); } /** * Creates a <strong>parallel</strong> stream from the {@link Builder}. This operation is similar to calling * {@code builder.build().parallel()} with the difference that parallel * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal * operations</a> will be executed in the given {@link ForkJoinPool}. * * @param <T> The type of stream elements. 
* @param builder The builder to create the stream from. Must not be null. * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. * @see Stream#builder() */ public static <T> Stream<T> parallelStream(Builder<T> builder, ForkJoinPool workerPool) { requireNonNull(builder, "Builder must not be null"); return new ParallelStreamSupport<>(builder.build().parallel(), workerPool); } /** * Creates a <strong>parallel</strong> infinite ordered stream produced by iterative application of a function * {@code f} to an initial element {@code seed}. This operation is similar to calling {@code Stream.iterate(seed, * operator).parallel()} with the difference that parallel * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal * operations</a> will be executed in the given {@link ForkJoinPool}. * * @param <T> The type of stream elements. * @param seed The initial element. * @param operator A function to be applied to to the previous element to produce a new element * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. * @see Stream#iterate(Object, UnaryOperator) */ public static <T> Stream<T> iterate(T seed, UnaryOperator<T> operator, ForkJoinPool workerPool) { requireNonNull(operator, "Operator must not be null"); return new ParallelStreamSupport<>(Stream.iterate(seed, operator).parallel(), workerPool); } /** * Creates a <strong>parallel</strong> infinite sequential unordered stream where each element is generated by the * provided {@code Supplier}. 
This operation is similar to calling {@code Stream.generate(supplier).parallel()} with * the difference that parallel * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal * operations</a> will be executed in the given {@link ForkJoinPool}. * * @param <T> The type of stream elements. * @param supplier The {@code Supplier} of generated elements. * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. * @see Stream#generate(Supplier) */ public static <T> Stream<T> generate(Supplier<T> supplier, ForkJoinPool workerPool) { requireNonNull(supplier, "Supplier must not be null"); return new ParallelStreamSupport<>(Stream.generate(supplier).parallel(), workerPool); } /** * Creates a lazily concatenated <strong>parallel</strong> stream whose elements are all the elements of the first * stream followed by all the elements of the second stream. his operation is similar to calling * {@code Stream.concat(a, b).parallel()} with the difference that parallel * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal * operations</a> will be executed in the given {@link ForkJoinPool}. * * @param <T> The type of stream elements. * @param a The first stream * @param b The second stream * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. * @see Stream#concat(Stream, Stream) */ public static <T> Stream<T> concat(Stream<? extends T> a, Stream<? 
extends T> b, ForkJoinPool workerPool) { requireNonNull(a, "Stream a must not be null"); requireNonNull(b, "Stream b must not be null"); return new ParallelStreamSupport<>(Stream.concat(a, b).parallel(), workerPool); } @Override public Stream<T> filter(Predicate<? super T> predicate) { this.delegate = this.delegate.filter(predicate); return this; } @Override public <R> Stream<R> map(Function<? super T, ? extends R> mapper) { return new ParallelStreamSupport<>(this.delegate.map(mapper), this.workerPool); } @Override public IntStream mapToInt(ToIntFunction<? super T> mapper) { return new ParallelIntStreamSupport(this.delegate.mapToInt(mapper), this.workerPool); } @Override public LongStream mapToLong(ToLongFunction<? super T> mapper) { return new ParallelLongStreamSupport(this.delegate.mapToLong(mapper), this.workerPool); } @Override public DoubleStream mapToDouble(ToDoubleFunction<? super T> mapper) { return new ParallelDoubleStreamSupport(this.delegate.mapToDouble(mapper), this.workerPool); } @Override public <R> Stream<R> flatMap(Function<? super T, ? extends Stream<? extends R>> mapper) { return new ParallelStreamSupport<>(this.delegate.flatMap(mapper), this.workerPool); } @Override public IntStream flatMapToInt(Function<? super T, ? extends IntStream> mapper) { return new ParallelIntStreamSupport(this.delegate.flatMapToInt(mapper), this.workerPool); } @Override public LongStream flatMapToLong(Function<? super T, ? extends LongStream> mapper) { return new ParallelLongStreamSupport(this.delegate.flatMapToLong(mapper), this.workerPool); } @Override public DoubleStream flatMapToDouble(Function<? super T, ? extends DoubleStream> mapper) { return new ParallelDoubleStreamSupport(this.delegate.flatMapToDouble(mapper), this.workerPool); } @Override public Stream<T> distinct() { this.delegate = this.delegate.distinct(); return this; } @Override public Stream<T> sorted() { this.delegate = this.delegate.sorted(); return this; } @Override public Stream<T> sorted(Comparator<? 
super T> comparator) { this.delegate = this.delegate.sorted(comparator); return this; } @Override public Stream<T> peek(Consumer<? super T> action) { this.delegate = this.delegate.peek(action); return this; } @Override public Stream<T> limit(long maxSize) { this.delegate = this.delegate.limit(maxSize); return this; } @Override public Stream<T> skip(long n) { this.delegate = this.delegate.skip(n); return this; } // Terminal operations @Override public void forEach(Consumer<? super T> action) { execute(() -> this.delegate.forEach(action)); } @Override public void forEachOrdered(Consumer<? super T> action) { execute(() -> this.delegate.forEachOrdered(action)); } @Override public Object[] toArray() { return execute(() -> this.delegate.toArray()); } @Override public <A> A[] toArray(IntFunction<A[]> generator) { return execute(() -> this.delegate.toArray(generator)); } @Override public T reduce(T identity, BinaryOperator<T> accumulator) { return execute(() -> this.delegate.reduce(identity, accumulator)); } @Override public Optional<T> reduce(BinaryOperator<T> accumulator) { return execute(() -> this.delegate.reduce(accumulator)); } @Override public <U> U reduce(U identity, BiFunction<U, ? super T, U> accumulator, BinaryOperator<U> combiner) { return execute(() -> this.delegate.reduce(identity, accumulator, combiner)); } @Override public <R> R collect(Supplier<R> supplier, BiConsumer<R, ? super T> accumulator, BiConsumer<R, R> combiner) { return execute(() -> this.delegate.collect(supplier, accumulator, combiner)); } @Override public <R, A> R collect(Collector<? super T, A, R> collector) { return execute(() -> this.delegate.collect(collector)); } @Override public Optional<T> min(Comparator<? super T> comparator) { return execute(() -> this.delegate.min(comparator)); } @Override public Optional<T> max(Comparator<? 
super T> comparator) { return execute(() -> this.delegate.max(comparator)); } @Override public long count() { return execute(() -> this.delegate.count()); } @Override public boolean anyMatch(Predicate<? super T> predicate) { return execute(() -> this.delegate.anyMatch(predicate)); } @Override public boolean allMatch(Predicate<? super T> predicate) { return execute(() -> this.delegate.allMatch(predicate)); } @Override public boolean noneMatch(Predicate<? super T> predicate) { return execute(() -> this.delegate.noneMatch(predicate)); } @Override public Optional<T> findFirst() { return execute(() -> this.delegate.findFirst()); } @Override public Optional<T> findAny() { return execute(() -> this.delegate.findAny()); } }
src/main/java/com/github/ferstl/streams/ParallelStreamSupport.java
package com.github.ferstl.streams; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.Optional; import java.util.Spliterator; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.ForkJoinTask; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.BinaryOperator; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.IntFunction; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.ToDoubleFunction; import java.util.function.ToIntFunction; import java.util.function.ToLongFunction; import java.util.function.UnaryOperator; import java.util.stream.Collector; import java.util.stream.DoubleStream; import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; import java.util.stream.StreamSupport; import static java.util.Arrays.stream; import static java.util.Objects.requireNonNull; import static java.util.stream.StreamSupport.stream; /** * <p> * An implementation of {@link Stream} which uses a custom {@link ForkJoinPool} for parallel aggregations. The * following example illustrates an aggregate operation using {@link ParallelStreamSupport} with a custom * {@link ForkJoinPool}: * * <pre> * * ForkJoinPool pool = new ForkJoinPool(); * int sum = ParallelStreamSupport.parallelStream(widgets, pool) * .filter(w -> w.getColor() == RED) * .mapToInt(w -> w.getWeight()) * .sum(); * </pre> * </p> * <p> * In case this stream is configured for parallel execution, i.e. {@link #isParallel()} returns {@code true}, a * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal * operation</a> will be executed as {@link ForkJoinTask} in the custom {@link ForkJoinPool}. Otherwise it will be * executed in the calling thread. 
* </p> * <p> * This implementation offers various factory methods which are based on: * <ul> * <li>The static factory methods of {@link Stream}, which are meaningful for parallel streams</li> * <li>{@link Collection#parallelStream()}</li> * <li>{@link Arrays#stream(Object[])}</li> * <li>{@link StreamSupport#stream(Spliterator, boolean)}</li> * <li>{@link StreamSupport#stream(Supplier, int, boolean))}</li> * </ul> * </p> * * @apiNote Internally, this stream wraps a stream which is initially created in one of the static factory methods. * Whenever a non-terminal operation is called the underlying stream will be replaced with the result of calling the * same method on that stream. Although each factory method returns a parallel stream, calling {@link #sequential()} is * still possible and leads to sequential execution of a terminal operation within the calling thread. */ public class ParallelStreamSupport<T> extends AbstractParallelStreamSupport<T, Stream<T>> implements Stream<T> { ParallelStreamSupport(Stream<T> delegate, ForkJoinPool workerPool) { super(delegate, workerPool); } public static <T> Stream<T> parallelStream(Collection<T> collection, ForkJoinPool workerPool) { requireNonNull(collection, "Collection must not be null"); return new ParallelStreamSupport<>(collection.parallelStream(), workerPool); } public static <T> Stream<T> parallelStream(T[] array, ForkJoinPool workerPool) { requireNonNull(array, "Array must not be null"); return new ParallelStreamSupport<>(stream(array).parallel(), workerPool); } public static <T> Stream<T> parallelStream(Spliterator<T> spliterator, ForkJoinPool workerPool) { requireNonNull(spliterator, "Spliterator must not be null"); return new ParallelStreamSupport<>(stream(spliterator, true), workerPool); } public static <T> Stream<T> parallelStream(Supplier<? 
extends Spliterator<T>> supplier, int characteristics, ForkJoinPool workerPool) { requireNonNull(supplier, "Supplier must not be null"); return new ParallelStreamSupport<>(stream(supplier, characteristics, true), workerPool); } public static <T> Stream<T> parallelStream(Builder<T> builder, ForkJoinPool workerPool) { requireNonNull(builder, "Builder must not be null"); return new ParallelStreamSupport<>(builder.build().parallel(), workerPool); } public static <T> Stream<T> iterate(T seed, UnaryOperator<T> operator, ForkJoinPool workerPool) { requireNonNull(operator, "Operator must not be null"); return new ParallelStreamSupport<>(Stream.iterate(seed, operator).parallel(), workerPool); } public static <T> Stream<T> generate(Supplier<T> supplier, ForkJoinPool workerPool) { requireNonNull(supplier, "Supplier must not be null"); return new ParallelStreamSupport<>(Stream.generate(supplier).parallel(), workerPool); } public static <T> Stream<T> concat(Stream<? extends T> a, Stream<? extends T> b, ForkJoinPool workerPool) { requireNonNull(a, "Stream a must not be null"); requireNonNull(b, "Stream b must not be null"); return new ParallelStreamSupport<>(Stream.concat(a, b).parallel(), workerPool); } @Override public Stream<T> filter(Predicate<? super T> predicate) { this.delegate = this.delegate.filter(predicate); return this; } @Override public <R> Stream<R> map(Function<? super T, ? extends R> mapper) { return new ParallelStreamSupport<>(this.delegate.map(mapper), this.workerPool); } @Override public IntStream mapToInt(ToIntFunction<? super T> mapper) { return new ParallelIntStreamSupport(this.delegate.mapToInt(mapper), this.workerPool); } @Override public LongStream mapToLong(ToLongFunction<? super T> mapper) { return new ParallelLongStreamSupport(this.delegate.mapToLong(mapper), this.workerPool); } @Override public DoubleStream mapToDouble(ToDoubleFunction<? 
super T> mapper) { return new ParallelDoubleStreamSupport(this.delegate.mapToDouble(mapper), this.workerPool); } @Override public <R> Stream<R> flatMap(Function<? super T, ? extends Stream<? extends R>> mapper) { return new ParallelStreamSupport<>(this.delegate.flatMap(mapper), this.workerPool); } @Override public IntStream flatMapToInt(Function<? super T, ? extends IntStream> mapper) { return new ParallelIntStreamSupport(this.delegate.flatMapToInt(mapper), this.workerPool); } @Override public LongStream flatMapToLong(Function<? super T, ? extends LongStream> mapper) { return new ParallelLongStreamSupport(this.delegate.flatMapToLong(mapper), this.workerPool); } @Override public DoubleStream flatMapToDouble(Function<? super T, ? extends DoubleStream> mapper) { return new ParallelDoubleStreamSupport(this.delegate.flatMapToDouble(mapper), this.workerPool); } @Override public Stream<T> distinct() { this.delegate = this.delegate.distinct(); return this; } @Override public Stream<T> sorted() { this.delegate = this.delegate.sorted(); return this; } @Override public Stream<T> sorted(Comparator<? super T> comparator) { this.delegate = this.delegate.sorted(comparator); return this; } @Override public Stream<T> peek(Consumer<? super T> action) { this.delegate = this.delegate.peek(action); return this; } @Override public Stream<T> limit(long maxSize) { this.delegate = this.delegate.limit(maxSize); return this; } @Override public Stream<T> skip(long n) { this.delegate = this.delegate.skip(n); return this; } // Terminal operations @Override public void forEach(Consumer<? super T> action) { execute(() -> this.delegate.forEach(action)); } @Override public void forEachOrdered(Consumer<? 
super T> action) { execute(() -> this.delegate.forEachOrdered(action)); } @Override public Object[] toArray() { return execute(() -> this.delegate.toArray()); } @Override public <A> A[] toArray(IntFunction<A[]> generator) { return execute(() -> this.delegate.toArray(generator)); } @Override public T reduce(T identity, BinaryOperator<T> accumulator) { return execute(() -> this.delegate.reduce(identity, accumulator)); } @Override public Optional<T> reduce(BinaryOperator<T> accumulator) { return execute(() -> this.delegate.reduce(accumulator)); } @Override public <U> U reduce(U identity, BiFunction<U, ? super T, U> accumulator, BinaryOperator<U> combiner) { return execute(() -> this.delegate.reduce(identity, accumulator, combiner)); } @Override public <R> R collect(Supplier<R> supplier, BiConsumer<R, ? super T> accumulator, BiConsumer<R, R> combiner) { return execute(() -> this.delegate.collect(supplier, accumulator, combiner)); } @Override public <R, A> R collect(Collector<? super T, A, R> collector) { return execute(() -> this.delegate.collect(collector)); } @Override public Optional<T> min(Comparator<? super T> comparator) { return execute(() -> this.delegate.min(comparator)); } @Override public Optional<T> max(Comparator<? super T> comparator) { return execute(() -> this.delegate.max(comparator)); } @Override public long count() { return execute(() -> this.delegate.count()); } @Override public boolean anyMatch(Predicate<? super T> predicate) { return execute(() -> this.delegate.anyMatch(predicate)); } @Override public boolean allMatch(Predicate<? super T> predicate) { return execute(() -> this.delegate.allMatch(predicate)); } @Override public boolean noneMatch(Predicate<? super T> predicate) { return execute(() -> this.delegate.noneMatch(predicate)); } @Override public Optional<T> findFirst() { return execute(() -> this.delegate.findFirst()); } @Override public Optional<T> findAny() { return execute(() -> this.delegate.findAny()); } }
Document constructor and static factory methods.
src/main/java/com/github/ferstl/streams/ParallelStreamSupport.java
Document constructor and static factory methods.
<ide><path>rc/main/java/com/github/ferstl/streams/ParallelStreamSupport.java <ide> */ <ide> public class ParallelStreamSupport<T> extends AbstractParallelStreamSupport<T, Stream<T>> implements Stream<T> { <ide> <add> /** <add> * Constructor for internal use within this package only. <add> * <add> * @param delegate Stream to delegate each operation. <add> * @param workerPool Worker pool for executing terminaloperations in parallel. Must not be null. <add> */ <ide> ParallelStreamSupport(Stream<T> delegate, ForkJoinPool workerPool) { <ide> super(delegate, workerPool); <ide> } <ide> <add> /** <add> * Creates a <strong>parallel</strong> stream from the given Collection. This operation is similar to <add> * {@link Collection#parallelStream()} with the difference that parallel <add> * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal <add> * operations</a> will be executed in the given {@link ForkJoinPool}. <add> * <add> * @param <T> The type of stream elements. <add> * @param collection Collection to create the parallel stream from. Must not be null. <add> * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. <add> * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. <add> * @see Collection#parallelStream() <add> */ <ide> public static <T> Stream<T> parallelStream(Collection<T> collection, ForkJoinPool workerPool) { <ide> requireNonNull(collection, "Collection must not be null"); <ide> <ide> return new ParallelStreamSupport<>(collection.parallelStream(), workerPool); <ide> } <ide> <add> /** <add> * Creates a <strong>parallel</strong> stream from the given Array. 
This operation is similar to <add> * {@link Arrays#stream(Object[])} with the difference that parallel <add> * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal <add> * operations</a> will be executed in the given {@link ForkJoinPool}. <add> * <add> * @param <T> The type of stream elements. <add> * @param array Array to create the parallel stream from. Must not be null. <add> * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. <add> * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. <add> * @see Arrays#stream(Object[]) <add> */ <ide> public static <T> Stream<T> parallelStream(T[] array, ForkJoinPool workerPool) { <ide> requireNonNull(array, "Array must not be null"); <ide> <ide> return new ParallelStreamSupport<>(stream(array).parallel(), workerPool); <ide> } <ide> <add> /** <add> * Creates a <strong>parallel</strong> stream from the given Spliterator. This operation is similar to <add> * {@link StreamSupport#stream(Spliterator, boolean)} with the difference that parallel <add> * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal <add> * operations</a> will be executed in the given {@link ForkJoinPool}. <add> * <add> * @param <T> The type of stream elements. <add> * @param spliterator A {@code Spliterator} describing the stream elements. Must not be null. <add> * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. <add> * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. 
<add> * @see StreamSupport#stream(Spliterator, boolean) <add> */ <ide> public static <T> Stream<T> parallelStream(Spliterator<T> spliterator, ForkJoinPool workerPool) { <ide> requireNonNull(spliterator, "Spliterator must not be null"); <ide> <ide> return new ParallelStreamSupport<>(stream(spliterator, true), workerPool); <ide> } <ide> <add> /** <add> * Creates a <strong>parallel</strong> stream from the given Spliterator supplier. This operation is similar to <add> * {@link StreamSupport#longStream(Supplier, int, boolean))} with the difference that parallel <add> * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal <add> * operations</a> will be executed in the given {@link ForkJoinPool}. <add> * <add> * @param <T> The type of stream elements. <add> * @param supplier A {@code Supplier} of a {@code Spliterator}. Must not be null. <add> * @param characteristics Spliterator characteristics of the supplied {@code Spliterator}. The characteristics must <add> * be equal to {@code supplier.get().characteristics()}, otherwise undefined behavior may occur when terminal <add> * operation commences. <add> * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. <add> * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. <add> * @see StreamSupport#stream(Supplier, int, boolean) <add> */ <ide> public static <T> Stream<T> parallelStream(Supplier<? extends Spliterator<T>> supplier, int characteristics, ForkJoinPool workerPool) { <ide> requireNonNull(supplier, "Supplier must not be null"); <ide> <ide> return new ParallelStreamSupport<>(stream(supplier, characteristics, true), workerPool); <ide> } <ide> <add> /** <add> * Creates a <strong>parallel</strong> stream from the {@link Builder}. 
This operation is similar to calling <add> * {@code builder.build().parallel()} with the difference that parallel <add> * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal <add> * operations</a> will be executed in the given {@link ForkJoinPool}. <add> * <add> * @param <T> The type of stream elements. <add> * @param builder The builder to create the stream from. Must not be null. <add> * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. <add> * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. <add> * @see Stream#builder() <add> */ <ide> public static <T> Stream<T> parallelStream(Builder<T> builder, ForkJoinPool workerPool) { <ide> requireNonNull(builder, "Builder must not be null"); <ide> <ide> return new ParallelStreamSupport<>(builder.build().parallel(), workerPool); <ide> } <ide> <add> /** <add> * Creates a <strong>parallel</strong> infinite ordered stream produced by iterative application of a function <add> * {@code f} to an initial element {@code seed}. This operation is similar to calling {@code Stream.iterate(seed, <add> * operator).parallel()} with the difference that parallel <add> * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal <add> * operations</a> will be executed in the given {@link ForkJoinPool}. <add> * <add> * @param <T> The type of stream elements. <add> * @param seed The initial element. <add> * @param operator A function to be applied to to the previous element to produce a new element <add> * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. <add> * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. 
<add> * @see Stream#iterate(Object, UnaryOperator) <add> */ <ide> public static <T> Stream<T> iterate(T seed, UnaryOperator<T> operator, ForkJoinPool workerPool) { <ide> requireNonNull(operator, "Operator must not be null"); <ide> <ide> return new ParallelStreamSupport<>(Stream.iterate(seed, operator).parallel(), workerPool); <ide> } <ide> <add> /** <add> * Creates a <strong>parallel</strong> infinite sequential unordered stream where each element is generated by the <add> * provided {@code Supplier}. This operation is similar to calling {@code Stream.generate(supplier).parallel()} with <add> * the difference that parallel <add> * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal <add> * operations</a> will be executed in the given {@link ForkJoinPool}. <add> * <add> * @param <T> The type of stream elements. <add> * @param supplier The {@code Supplier} of generated elements. <add> * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. <add> * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. <add> * @see Stream#generate(Supplier) <add> */ <ide> public static <T> Stream<T> generate(Supplier<T> supplier, ForkJoinPool workerPool) { <ide> requireNonNull(supplier, "Supplier must not be null"); <ide> <ide> return new ParallelStreamSupport<>(Stream.generate(supplier).parallel(), workerPool); <ide> } <ide> <add> /** <add> * Creates a lazily concatenated <strong>parallel</strong> stream whose elements are all the elements of the first <add> * stream followed by all the elements of the second stream. his operation is similar to calling <add> * {@code Stream.concat(a, b).parallel()} with the difference that parallel <add> * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#StreamOps">terminal <add> * operations</a> will be executed in the given {@link ForkJoinPool}. 
<add> * <add> * @param <T> The type of stream elements. <add> * @param a The first stream <add> * @param b The second stream <add> * @param workerPool Thread pool for parallel execution of a terminal operation. Must not be null. <add> * @return A parallel stream that executes a terminal operation in the given {@link ForkJoinPool}. <add> * @see Stream#concat(Stream, Stream) <add> */ <ide> public static <T> Stream<T> concat(Stream<? extends T> a, Stream<? extends T> b, ForkJoinPool workerPool) { <ide> requireNonNull(a, "Stream a must not be null"); <ide> requireNonNull(b, "Stream b must not be null");
Java
apache-2.0
7c94a73ba66c0ae4b1d7740a08e8ae4ec26f7c9f
0
chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq
/** * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.transport.discovery.multicast; import java.io.IOException; import java.net.DatagramPacket; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.MulticastSocket; import java.net.SocketAddress; import java.net.SocketTimeoutException; import java.net.URI; import java.util.Iterator; import java.util.Map; import org.apache.activemq.command.DiscoveryEvent; import org.apache.activemq.transport.discovery.DiscoveryAgent; import org.apache.activemq.transport.discovery.DiscoveryListener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; /** * A {@link DiscoveryAgent} using a multicast address and heartbeat packets encoded using any * wireformat, but openwire by default. 
* * @version $Revision$ */ public class MulticastDiscoveryAgent implements DiscoveryAgent,Runnable{ private static final Log log=LogFactory.getLog(MulticastDiscoveryAgent.class); public static final String DEFAULT_DISCOVERY_URI_STRING="multicast://239.255.2.3:6155"; private static final String TYPE_SUFFIX="ActiveMQ-4."; private static final String ALIVE="alive."; private static final String DEAD="dead."; private static final String DELIMITER = "%"; private static final int BUFF_SIZE=8192; private static final int DEFAULT_IDLE_TIME=500; private static final int HEARTBEAT_MISS_BEFORE_DEATH=10; private long initialReconnectDelay = 1000*5; private long maxReconnectDelay = 1000 * 30; private long backOffMultiplier = 2; private boolean useExponentialBackOff = false; private int maxReconnectAttempts; class RemoteBrokerData { final String brokerName; final String service; long lastHeartBeat; long recoveryTime; int failureCount; boolean failed; public RemoteBrokerData(String brokerName, String service) { this.brokerName=brokerName; this.service=service; this.lastHeartBeat=System.currentTimeMillis(); } synchronized public void updateHeartBeat() { lastHeartBeat= System.currentTimeMillis(); // Consider that the broker recovery has succeeded if it has not failed in 60 seconds. 
if( !failed && failureCount>0 && (lastHeartBeat-recoveryTime) > 1000*60 ) { if(log.isDebugEnabled()) log.debug("I now think that the "+service+" service has recovered."); failureCount=0; recoveryTime=0; } } synchronized public long getLastHeartBeat() { return lastHeartBeat; } synchronized public boolean markFailed() { if ( !failed ) { failed=true; failureCount++; long reconnectDelay; if (!useExponentialBackOff) { reconnectDelay = initialReconnectDelay; } else { reconnectDelay = (long)Math.pow(backOffMultiplier, failureCount); if(reconnectDelay>maxReconnectDelay) reconnectDelay=maxReconnectDelay; } if(log.isDebugEnabled()) log.debug("Remote failure of "+service+" while still receiving multicast advertisements. Advertising events will be suppressed for "+reconnectDelay+" ms, the current failure count is: "+failureCount); recoveryTime = System.currentTimeMillis()+reconnectDelay; return true; } return false; } /** * @return true if this broker is marked failed and it is now the right time to start recovery. */ synchronized public boolean doRecovery() { if( !failed ) return false; // Are we done trying to recover this guy? if( maxReconnectAttempts>0 && failureCount > maxReconnectAttempts ) { if(log.isDebugEnabled()) log.debug("Max reconnect attempts of the "+service+" service has been reached."); return false; } // Is it not yet time? 
if( System.currentTimeMillis() < recoveryTime ) return false; if(log.isDebugEnabled()) log.debug("Resuming event advertisement of the "+service+" service."); failed=false; return true; } public boolean isFailed() { return failed; } } private int timeToLive=1; private boolean loopBackMode=false; private Map brokersByService=new ConcurrentHashMap(); private String group="default"; private String brokerName; private URI discoveryURI; private InetAddress inetAddress; private SocketAddress sockAddress; private DiscoveryListener discoveryListener; private String selfService; private MulticastSocket mcast; private Thread runner; private long keepAliveInterval=DEFAULT_IDLE_TIME; private long lastAdvertizeTime=0; private AtomicBoolean started=new AtomicBoolean(false); private boolean reportAdvertizeFailed=true; private final Executor executor = new ThreadPoolExecutor(1, 1, 30, TimeUnit.SECONDS, new LinkedBlockingQueue(), new ThreadFactory() { public Thread newThread(Runnable runable) { Thread t = new Thread(runable, "Multicast Discovery Agent Notifier"); t.setDaemon(true); return t; } }); /** * Set the discovery listener * * @param listener */ public void setDiscoveryListener(DiscoveryListener listener){ this.discoveryListener=listener; } /** * register a service */ public void registerService(String name) throws IOException{ this.selfService=name; if (started.get()){ doAdvertizeSelf(); } } /** * Get the group used for discovery * * @return the group */ public String getGroup(){ return group; } /** * Set the group for discovery * * @param group */ public void setGroup(String group){ this.group=group; } /** * @return Returns the brokerName. */ public String getBrokerName(){ return brokerName; } /** * @param brokerName The brokerName to set. 
*/ public void setBrokerName(String brokerName){ if (brokerName != null){ brokerName = brokerName.replace('.','-'); brokerName = brokerName.replace(':','-'); brokerName = brokerName.replace('%','-'); this.brokerName=brokerName; } } /** * @return Returns the loopBackMode. */ public boolean isLoopBackMode(){ return loopBackMode; } /** * @param loopBackMode * The loopBackMode to set. */ public void setLoopBackMode(boolean loopBackMode){ this.loopBackMode=loopBackMode; } /** * @return Returns the timeToLive. */ public int getTimeToLive(){ return timeToLive; } /** * @param timeToLive * The timeToLive to set. */ public void setTimeToLive(int timeToLive){ this.timeToLive=timeToLive; } /** * @return the discoveryURI */ public URI getDiscoveryURI(){ return discoveryURI; } /** * Set the discoveryURI * * @param discoveryURI */ public void setDiscoveryURI(URI discoveryURI){ this.discoveryURI=discoveryURI; } public long getKeepAliveInterval(){ return keepAliveInterval; } public void setKeepAliveInterval(long keepAliveInterval){ this.keepAliveInterval=keepAliveInterval; } /** * start the discovery agent * * @throws Exception */ public void start() throws Exception{ if(started.compareAndSet(false,true)){ if(group==null|| group.length()==0){ throw new IOException("You must specify a group to discover"); } if (brokerName == null || brokerName.length()==0){ log.warn("brokerName not set"); } String type=getType(); if(!type.endsWith(".")){ log.warn("The type '"+type+"' should end with '.' 
to be a valid Discovery type"); type+="."; } if(discoveryURI==null){ discoveryURI=new URI(DEFAULT_DISCOVERY_URI_STRING); } this.inetAddress=InetAddress.getByName(discoveryURI.getHost()); this.sockAddress=new InetSocketAddress(this.inetAddress,discoveryURI.getPort()); mcast=new MulticastSocket(discoveryURI.getPort()); mcast.setLoopbackMode(loopBackMode); mcast.setTimeToLive(getTimeToLive()); mcast.joinGroup(inetAddress); mcast.setSoTimeout((int) keepAliveInterval); runner=new Thread(this); runner.setName("MulticastDiscovery: "+selfService); runner.setDaemon(true); runner.start(); doAdvertizeSelf(); } } /** * stop the channel * * @throws Exception */ public void stop() throws Exception{ if(started.compareAndSet(true,false)){ doAdvertizeSelf(); mcast.close(); } } public String getType(){ return group+"."+TYPE_SUFFIX; } public void run(){ byte[] buf=new byte[BUFF_SIZE]; DatagramPacket packet=new DatagramPacket(buf,0,buf.length); while(started.get()){ doTimeKeepingServices(); try{ mcast.receive(packet); if(packet.getLength()>0){ String str=new String(packet.getData(),packet.getOffset(),packet.getLength()); processData(str); } } catch(SocketTimeoutException se){ // ignore } catch(IOException e){ if( started.get() ) { log.error("failed to process packet: "+e); } } } } private void processData(String str){ if (discoveryListener != null){ if(str.startsWith(getType())){ String payload=str.substring(getType().length()); if(payload.startsWith(ALIVE)){ String brokerName=getBrokerName(payload.substring(ALIVE.length())); String service=payload.substring(ALIVE.length()+brokerName.length()+2); if(!brokerName.equals(this.brokerName)){ processAlive(brokerName,service); } }else{ String brokerName=getBrokerName(payload.substring(DEAD.length())); String service=payload.substring(DEAD.length()+brokerName.length()+2); if(!brokerName.equals(this.brokerName)){ processDead(brokerName,service); } } } } } private void doTimeKeepingServices(){ if(started.get()){ long 
currentTime=System.currentTimeMillis(); if (currentTime < lastAdvertizeTime || ((currentTime-keepAliveInterval)>lastAdvertizeTime)) { doAdvertizeSelf(); lastAdvertizeTime = currentTime; } doExpireOldServices(); } } private void doAdvertizeSelf(){ if(selfService!=null ){ String payload=getType(); payload+=started.get()?ALIVE:DEAD; payload+=DELIMITER+brokerName+DELIMITER; payload+=selfService; try{ byte[] data=payload.getBytes(); DatagramPacket packet=new DatagramPacket(data,0,data.length,sockAddress); mcast.send(packet); } catch(IOException e) { // If a send fails, chances are all subsequent sends will fail too.. No need to keep reporting the // same error over and over. if( reportAdvertizeFailed ) { reportAdvertizeFailed=false; log.error("Failed to advertise our service: "+payload,e); if( "Operation not permitted".equals(e.getMessage()) ) { log.error("The 'Operation not permitted' error has been know to be caused by improper firewall/network setup. Please make sure that the OS is properly configured to allow multicast traffic over: "+mcast.getLocalAddress()); } } } } } private void processAlive(String brokerName,String service){ if(selfService == null || !service.equals(selfService)){ RemoteBrokerData data = (RemoteBrokerData)brokersByService.get(service); if(data==null){ data = new RemoteBrokerData(brokerName, service); brokersByService.put(service,data);; fireServiceAddEvent(data); doAdvertizeSelf(); } else { data.updateHeartBeat(); if( data.doRecovery() ) { fireServiceAddEvent(data); } } } } private void processDead(String brokerName,String service){ if(!service.equals(selfService)){ RemoteBrokerData data = (RemoteBrokerData) brokersByService.remove(service); if(data!=null && !data.isFailed() ){ fireServiceRemovedEvent(data); } } } private void doExpireOldServices(){ long expireTime=System.currentTimeMillis()-(keepAliveInterval*HEARTBEAT_MISS_BEFORE_DEATH); for(Iterator i=brokersByService.values().iterator();i.hasNext();){ RemoteBrokerData 
data=(RemoteBrokerData)i.next(); if( data.getLastHeartBeat() < expireTime){ processDead(brokerName, data.service); } } } private String getBrokerName(String str){ String result = null; int start = str.indexOf(DELIMITER); if (start >= 0 ){ int end = str.indexOf(DELIMITER,start+1); result=str.substring(start+1, end); } return result; } public void serviceFailed(DiscoveryEvent event) throws IOException { RemoteBrokerData data = (RemoteBrokerData)brokersByService.get(event.getServiceName()); if(data!=null && data.markFailed() ) { fireServiceRemovedEvent(data); } } private void fireServiceRemovedEvent(RemoteBrokerData data) { if( discoveryListener!=null){ final DiscoveryEvent event=new DiscoveryEvent(data.service); event.setBrokerName(data.brokerName); // Have the listener process the event async so that // he does not block this thread since we are doing time sensitive // processing of events. executor.execute(new Runnable() { public void run() { DiscoveryListener discoveryListener = MulticastDiscoveryAgent.this.discoveryListener; if(discoveryListener!=null){ discoveryListener.onServiceRemove(event); } } }); } } private void fireServiceAddEvent(RemoteBrokerData data) { if( discoveryListener!=null){ final DiscoveryEvent event=new DiscoveryEvent(data.service); event.setBrokerName(data.brokerName); // Have the listener process the event async so that // he does not block this thread since we are doing time sensitive // processing of events. 
executor.execute(new Runnable() { public void run() { DiscoveryListener discoveryListener = MulticastDiscoveryAgent.this.discoveryListener; if(discoveryListener!=null){ discoveryListener.onServiceAdd(event); } } }); } } public long getBackOffMultiplier() { return backOffMultiplier; } public void setBackOffMultiplier(long backOffMultiplier) { this.backOffMultiplier = backOffMultiplier; } public long getInitialReconnectDelay() { return initialReconnectDelay; } public void setInitialReconnectDelay(long initialReconnectDelay) { this.initialReconnectDelay = initialReconnectDelay; } public int getMaxReconnectAttempts() { return maxReconnectAttempts; } public void setMaxReconnectAttempts(int maxReconnectAttempts) { this.maxReconnectAttempts = maxReconnectAttempts; } public long getMaxReconnectDelay() { return maxReconnectDelay; } public void setMaxReconnectDelay(long maxReconnectDelay) { this.maxReconnectDelay = maxReconnectDelay; } public boolean isUseExponentialBackOff() { return useExponentialBackOff; } public void setUseExponentialBackOff(boolean useExponentialBackOff) { this.useExponentialBackOff = useExponentialBackOff; } }
activemq-core/src/main/java/org/apache/activemq/transport/discovery/multicast/MulticastDiscoveryAgent.java
/** * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.transport.discovery.multicast; import java.io.IOException; import java.net.DatagramPacket; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.MulticastSocket; import java.net.SocketAddress; import java.net.SocketTimeoutException; import java.net.URI; import java.util.Iterator; import java.util.Map; import org.apache.activemq.command.DiscoveryEvent; import org.apache.activemq.transport.discovery.DiscoveryAgent; import org.apache.activemq.transport.discovery.DiscoveryListener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; /** * A {@link DiscoveryAgent} using a multicast address and heartbeat packets encoded using any * wireformat, but openwire by default. 
* * @version $Revision$ */ public class MulticastDiscoveryAgent implements DiscoveryAgent,Runnable{ private static final Log log=LogFactory.getLog(MulticastDiscoveryAgent.class); public static final String DEFAULT_DISCOVERY_URI_STRING="multicast://239.255.2.3:6155"; private static final String TYPE_SUFFIX="ActiveMQ-4."; private static final String ALIVE="alive."; private static final String DEAD="dead."; private static final String DELIMITER = "%"; private static final int BUFF_SIZE=8192; private static final int DEFAULT_IDLE_TIME=500; private static final int HEARTBEAT_MISS_BEFORE_DEATH=4; private int timeToLive=1; private boolean loopBackMode=false; private Map services=new ConcurrentHashMap(); private Map brokers = new ConcurrentHashMap(); private String group="default"; private String brokerName; private URI discoveryURI; private InetAddress inetAddress; private SocketAddress sockAddress; private DiscoveryListener discoveryListener; private String selfService; private MulticastSocket mcast; private Thread runner; private long keepAliveInterval=DEFAULT_IDLE_TIME; private long lastAdvertizeTime=0; private AtomicBoolean started=new AtomicBoolean(false); private boolean reportAdvertizeFailed=true; private final Executor executor = new ThreadPoolExecutor(1, 1, 30, TimeUnit.SECONDS, new LinkedBlockingQueue(), new ThreadFactory() { public Thread newThread(Runnable runable) { Thread t = new Thread(runable, "Multicast Discovery Agent Notifier"); t.setDaemon(true); return t; } }); /** * Set the discovery listener * * @param listener */ public void setDiscoveryListener(DiscoveryListener listener){ this.discoveryListener=listener; } /** * register a service */ public void registerService(String name) throws IOException{ this.selfService=name; if (started.get()){ doAdvertizeSelf(); } } /** * Get the group used for discovery * * @return the group */ public String getGroup(){ return group; } /** * Set the group for discovery * * @param group */ public void setGroup(String 
group){ this.group=group; } /** * @return Returns the brokerName. */ public String getBrokerName(){ return brokerName; } /** * @param brokerName The brokerName to set. */ public void setBrokerName(String brokerName){ if (brokerName != null){ brokerName = brokerName.replace('.','-'); brokerName = brokerName.replace(':','-'); brokerName = brokerName.replace('%','-'); this.brokerName=brokerName; } } /** * @return Returns the loopBackMode. */ public boolean isLoopBackMode(){ return loopBackMode; } /** * @param loopBackMode * The loopBackMode to set. */ public void setLoopBackMode(boolean loopBackMode){ this.loopBackMode=loopBackMode; } /** * @return Returns the timeToLive. */ public int getTimeToLive(){ return timeToLive; } /** * @param timeToLive * The timeToLive to set. */ public void setTimeToLive(int timeToLive){ this.timeToLive=timeToLive; } /** * @return the discoveryURI */ public URI getDiscoveryURI(){ return discoveryURI; } /** * Set the discoveryURI * * @param discoveryURI */ public void setDiscoveryURI(URI discoveryURI){ this.discoveryURI=discoveryURI; } public long getKeepAliveInterval(){ return keepAliveInterval; } public void setKeepAliveInterval(long keepAliveInterval){ this.keepAliveInterval=keepAliveInterval; } /** * start the discovery agent * * @throws Exception */ public void start() throws Exception{ if(started.compareAndSet(false,true)){ if(group==null|| group.length()==0){ throw new IOException("You must specify a group to discover"); } if (brokerName == null || brokerName.length()==0){ log.warn("brokerName not set"); } String type=getType(); if(!type.endsWith(".")){ log.warn("The type '"+type+"' should end with '.' 
to be a valid Discovery type"); type+="."; } if(discoveryURI==null){ discoveryURI=new URI(DEFAULT_DISCOVERY_URI_STRING); } this.inetAddress=InetAddress.getByName(discoveryURI.getHost()); this.sockAddress=new InetSocketAddress(this.inetAddress,discoveryURI.getPort()); mcast=new MulticastSocket(discoveryURI.getPort()); mcast.setLoopbackMode(loopBackMode); mcast.setTimeToLive(getTimeToLive()); mcast.joinGroup(inetAddress); mcast.setSoTimeout((int) keepAliveInterval); runner=new Thread(this); runner.setName("MulticastDiscovery: "+selfService); runner.setDaemon(true); runner.start(); doAdvertizeSelf(); } } /** * stop the channel * * @throws Exception */ public void stop() throws Exception{ if(started.compareAndSet(true,false)){ doAdvertizeSelf(); mcast.close(); } } public String getType(){ return group+"."+TYPE_SUFFIX; } public void run(){ byte[] buf=new byte[BUFF_SIZE]; DatagramPacket packet=new DatagramPacket(buf,0,buf.length); while(started.get()){ doTimeKeepingServices(); try{ mcast.receive(packet); if(packet.getLength()>0){ String str=new String(packet.getData(),packet.getOffset(),packet.getLength()); processData(str); } } catch(SocketTimeoutException se){ // ignore } catch(IOException e){ if( started.get() ) { log.error("failed to process packet: "+e); } } } } private void processData(String str){ if (discoveryListener != null){ if(str.startsWith(getType())){ String payload=str.substring(getType().length()); if(payload.startsWith(ALIVE)){ String brokerName=getBrokerName(payload.substring(ALIVE.length())); String service=payload.substring(ALIVE.length()+brokerName.length()+2); if(!brokerName.equals(this.brokerName)){ processAlive(brokerName,service); } }else{ String brokerName=getBrokerName(payload.substring(DEAD.length())); String service=payload.substring(DEAD.length()+brokerName.length()+2); if(!brokerName.equals(this.brokerName)){ processDead(brokerName,service); } } } } } private void doTimeKeepingServices(){ if(started.get()){ long 
currentTime=System.currentTimeMillis(); if (currentTime < lastAdvertizeTime || ((currentTime-keepAliveInterval)>lastAdvertizeTime)) { doAdvertizeSelf(); lastAdvertizeTime = currentTime; } doExpireOldServices(); } } private void doAdvertizeSelf(){ if(selfService!=null ){ String payload=getType(); payload+=started.get()?ALIVE:DEAD; payload+=DELIMITER+brokerName+DELIMITER; payload+=selfService; try{ byte[] data=payload.getBytes(); DatagramPacket packet=new DatagramPacket(data,0,data.length,sockAddress); mcast.send(packet); } catch(IOException e) { // If a send fails, chances are all subsequent sends will fail too.. No need to keep reporting the // same error over and over. if( reportAdvertizeFailed ) { reportAdvertizeFailed=false; log.error("Failed to advertise our service: "+payload,e); if( "Operation not permitted".equals(e.getMessage()) ) { log.error("The 'Operation not permitted' error has been know to be caused by improper firewall/network setup. Please make sure that the OS is properly configured to allow multicast traffic over: "+mcast.getLocalAddress()); } } } } } private void processAlive(String brokerName,String service){ if(selfService == null || !service.equals(selfService)){ AtomicLong lastKeepAlive=(AtomicLong) services.get(service); if(lastKeepAlive==null){ brokers.put(service, brokerName); if(discoveryListener!=null){ final DiscoveryEvent event=new DiscoveryEvent(service); event.setBrokerName(brokerName); // Have the listener process the event async so that // he does not block this thread since we are doing time sensitive // processing of events. 
executor.execute(new Runnable() { public void run() { DiscoveryListener discoveryListener = MulticastDiscoveryAgent.this.discoveryListener; if(discoveryListener!=null){ discoveryListener.onServiceAdd(event); } } }); } lastKeepAlive=new AtomicLong(System.currentTimeMillis()); services.put(service,lastKeepAlive); doAdvertizeSelf(); } lastKeepAlive.set(System.currentTimeMillis()); } } private void processDead(String brokerName,String service){ if(!service.equals(selfService)){ if(services.remove(service)!=null){ brokers.remove(service); if(discoveryListener!=null){ final DiscoveryEvent event=new DiscoveryEvent(service); event.setBrokerName(brokerName); // Have the listener process the event async so that // he does not block this thread since we are doing time sensitive // processing of events. executor.execute(new Runnable() { public void run() { DiscoveryListener discoveryListener = MulticastDiscoveryAgent.this.discoveryListener; if(discoveryListener!=null){ discoveryListener.onServiceRemove(event); } } }); } } } } private void doExpireOldServices(){ long expireTime=System.currentTimeMillis()-(keepAliveInterval*HEARTBEAT_MISS_BEFORE_DEATH); for(Iterator i=services.entrySet().iterator();i.hasNext();){ Map.Entry entry=(Map.Entry) i.next(); AtomicLong lastHeartBeat=(AtomicLong) entry.getValue(); if(lastHeartBeat.get()<expireTime){ String brokerName = (String)brokers.get(entry.getKey()); processDead(brokerName,entry.getKey().toString()); } } } private String getBrokerName(String str){ String result = null; int start = str.indexOf(DELIMITER); if (start >= 0 ){ int end = str.indexOf(DELIMITER,start+1); result=str.substring(start+1, end); } return result; } public void serviceFailed(DiscoveryEvent event) throws IOException { processDead(event.getBrokerName(), event.getServiceName()); } }
Fix for AMQ-1342 - Added backoff delay in generating discovery events when broker failures are reported git-svn-id: d2a93f579bd4835921162e9a69396c846e49961c@559132 13f79535-47bb-0310-9956-ffa450edef68
activemq-core/src/main/java/org/apache/activemq/transport/discovery/multicast/MulticastDiscoveryAgent.java
Fix for AMQ-1342 - Added backoff delay in generating discovery events when broker failures are reported
<ide><path>ctivemq-core/src/main/java/org/apache/activemq/transport/discovery/multicast/MulticastDiscoveryAgent.java <ide> private static final String DELIMITER = "%"; <ide> private static final int BUFF_SIZE=8192; <ide> private static final int DEFAULT_IDLE_TIME=500; <del> private static final int HEARTBEAT_MISS_BEFORE_DEATH=4; <add> private static final int HEARTBEAT_MISS_BEFORE_DEATH=10; <add> <add> private long initialReconnectDelay = 1000*5; <add> private long maxReconnectDelay = 1000 * 30; <add> private long backOffMultiplier = 2; <add> private boolean useExponentialBackOff = false; <add> private int maxReconnectAttempts; <add> <add> <add> class RemoteBrokerData { <add> final String brokerName; <add> final String service; <add> long lastHeartBeat; <add> long recoveryTime; <add> int failureCount; <add> boolean failed; <add> <add> public RemoteBrokerData(String brokerName, String service) { <add> this.brokerName=brokerName; <add> this.service=service; <add> this.lastHeartBeat=System.currentTimeMillis(); <add> } <add> <add> synchronized public void updateHeartBeat() { <add> lastHeartBeat= System.currentTimeMillis(); <add> <add> // Consider that the broker recovery has succeeded if it has not failed in 60 seconds. 
<add> if( !failed && failureCount>0 && (lastHeartBeat-recoveryTime) > 1000*60 ) { <add> if(log.isDebugEnabled()) <add> log.debug("I now think that the "+service+" service has recovered."); <add> failureCount=0; <add> recoveryTime=0; <add> } <add> } <add> <add> synchronized public long getLastHeartBeat() { <add> return lastHeartBeat; <add> } <add> <add> synchronized public boolean markFailed() { <add> if ( !failed ) { <add> failed=true; <add> failureCount++; <add> <add> long reconnectDelay; <add> if (!useExponentialBackOff) { <add> reconnectDelay = initialReconnectDelay; <add> } else { <add> reconnectDelay = (long)Math.pow(backOffMultiplier, failureCount); <add> if(reconnectDelay>maxReconnectDelay) <add> reconnectDelay=maxReconnectDelay; <add> } <add> <add> if(log.isDebugEnabled()) <add> log.debug("Remote failure of "+service+" while still receiving multicast advertisements. Advertising events will be suppressed for "+reconnectDelay+" ms, the current failure count is: "+failureCount); <add> <add> recoveryTime = System.currentTimeMillis()+reconnectDelay; <add> return true; <add> } <add> return false; <add> } <add> <add> /** <add> * @return true if this broker is marked failed and it is now the right time to start recovery. <add> */ <add> synchronized public boolean doRecovery() { <add> if( !failed ) <add> return false; <add> <add> // Are we done trying to recover this guy? <add> if( maxReconnectAttempts>0 && failureCount > maxReconnectAttempts ) { <add> if(log.isDebugEnabled()) <add> log.debug("Max reconnect attempts of the "+service+" service has been reached."); <add> return false; <add> } <add> <add> // Is it not yet time? 
<add> if( System.currentTimeMillis() < recoveryTime ) <add> return false; <add> <add> if(log.isDebugEnabled()) <add> log.debug("Resuming event advertisement of the "+service+" service."); <add> <add> <add> failed=false; <add> return true; <add> } <add> <add> public boolean isFailed() { <add> return failed; <add> } <add> } <add> <ide> private int timeToLive=1; <ide> private boolean loopBackMode=false; <del> private Map services=new ConcurrentHashMap(); <del> private Map brokers = new ConcurrentHashMap(); <add> private Map brokersByService=new ConcurrentHashMap(); <ide> private String group="default"; <ide> private String brokerName; <ide> private URI discoveryURI; <ide> <ide> private void processAlive(String brokerName,String service){ <ide> if(selfService == null || !service.equals(selfService)){ <del> AtomicLong lastKeepAlive=(AtomicLong) services.get(service); <del> if(lastKeepAlive==null){ <del> brokers.put(service, brokerName); <del> if(discoveryListener!=null){ <del> final DiscoveryEvent event=new DiscoveryEvent(service); <del> event.setBrokerName(brokerName); <del> <del> // Have the listener process the event async so that <del> // he does not block this thread since we are doing time sensitive <del> // processing of events. 
<del> executor.execute(new Runnable() { <del> public void run() { <del> DiscoveryListener discoveryListener = MulticastDiscoveryAgent.this.discoveryListener; <del> if(discoveryListener!=null){ <del> discoveryListener.onServiceAdd(event); <del> } <del> } <del> }); <del> } <del> lastKeepAlive=new AtomicLong(System.currentTimeMillis()); <del> services.put(service,lastKeepAlive); <add> RemoteBrokerData data = (RemoteBrokerData)brokersByService.get(service); <add> if(data==null){ <add> data = new RemoteBrokerData(brokerName, service); <add> brokersByService.put(service,data);; <add> fireServiceAddEvent(data); <ide> doAdvertizeSelf(); <ide> <del> } <del> lastKeepAlive.set(System.currentTimeMillis()); <add> } else { <add> data.updateHeartBeat(); <add> if( data.doRecovery() ) { <add> fireServiceAddEvent(data); <add> } <add> } <ide> } <ide> } <ide> <ide> private void processDead(String brokerName,String service){ <ide> if(!service.equals(selfService)){ <del> if(services.remove(service)!=null){ <del> brokers.remove(service); <del> if(discoveryListener!=null){ <del> final DiscoveryEvent event=new DiscoveryEvent(service); <del> event.setBrokerName(brokerName); <del> <del> // Have the listener process the event async so that <del> // he does not block this thread since we are doing time sensitive <del> // processing of events. 
<del> executor.execute(new Runnable() { <del> public void run() { <del> DiscoveryListener discoveryListener = MulticastDiscoveryAgent.this.discoveryListener; <del> if(discoveryListener!=null){ <del> discoveryListener.onServiceRemove(event); <del> } <del> } <del> }); <del> } <add> RemoteBrokerData data = (RemoteBrokerData) brokersByService.remove(service); <add> if(data!=null && !data.isFailed() ){ <add> fireServiceRemovedEvent(data); <ide> } <ide> } <ide> } <ide> <ide> private void doExpireOldServices(){ <ide> long expireTime=System.currentTimeMillis()-(keepAliveInterval*HEARTBEAT_MISS_BEFORE_DEATH); <del> for(Iterator i=services.entrySet().iterator();i.hasNext();){ <del> Map.Entry entry=(Map.Entry) i.next(); <del> AtomicLong lastHeartBeat=(AtomicLong) entry.getValue(); <del> if(lastHeartBeat.get()<expireTime){ <del> String brokerName = (String)brokers.get(entry.getKey()); <del> processDead(brokerName,entry.getKey().toString()); <add> for(Iterator i=brokersByService.values().iterator();i.hasNext();){ <add> RemoteBrokerData data=(RemoteBrokerData)i.next(); <add> if( data.getLastHeartBeat() < expireTime){ <add> processDead(brokerName, data.service); <ide> } <ide> } <ide> } <ide> } <ide> <ide> public void serviceFailed(DiscoveryEvent event) throws IOException { <del> processDead(event.getBrokerName(), event.getServiceName()); <del> } <add> RemoteBrokerData data = (RemoteBrokerData)brokersByService.get(event.getServiceName()); <add> if(data!=null && data.markFailed() ) { <add> fireServiceRemovedEvent(data); <add> } <add> } <add> <add> private void fireServiceRemovedEvent(RemoteBrokerData data) { <add> if( discoveryListener!=null){ <add> final DiscoveryEvent event=new DiscoveryEvent(data.service); <add> event.setBrokerName(data.brokerName); <add> <add> // Have the listener process the event async so that <add> // he does not block this thread since we are doing time sensitive <add> // processing of events. 
<add> executor.execute(new Runnable() { <add> public void run() { <add> DiscoveryListener discoveryListener = MulticastDiscoveryAgent.this.discoveryListener; <add> if(discoveryListener!=null){ <add> discoveryListener.onServiceRemove(event); <add> } <add> } <add> }); <add> } <add> } <add> private void fireServiceAddEvent(RemoteBrokerData data) { <add> if( discoveryListener!=null){ <add> final DiscoveryEvent event=new DiscoveryEvent(data.service); <add> event.setBrokerName(data.brokerName); <add> <add> // Have the listener process the event async so that <add> // he does not block this thread since we are doing time sensitive <add> // processing of events. <add> executor.execute(new Runnable() { <add> public void run() { <add> DiscoveryListener discoveryListener = MulticastDiscoveryAgent.this.discoveryListener; <add> if(discoveryListener!=null){ <add> discoveryListener.onServiceAdd(event); <add> } <add> } <add> }); <add> } <add> } <add> <add> public long getBackOffMultiplier() { <add> return backOffMultiplier; <add> } <add> <add> public void setBackOffMultiplier(long backOffMultiplier) { <add> this.backOffMultiplier = backOffMultiplier; <add> } <add> <add> public long getInitialReconnectDelay() { <add> return initialReconnectDelay; <add> } <add> <add> public void setInitialReconnectDelay(long initialReconnectDelay) { <add> this.initialReconnectDelay = initialReconnectDelay; <add> } <add> <add> public int getMaxReconnectAttempts() { <add> return maxReconnectAttempts; <add> } <add> <add> public void setMaxReconnectAttempts(int maxReconnectAttempts) { <add> this.maxReconnectAttempts = maxReconnectAttempts; <add> } <add> <add> public long getMaxReconnectDelay() { <add> return maxReconnectDelay; <add> } <add> <add> public void setMaxReconnectDelay(long maxReconnectDelay) { <add> this.maxReconnectDelay = maxReconnectDelay; <add> } <add> <add> public boolean isUseExponentialBackOff() { <add> return useExponentialBackOff; <add> } <add> <add> public void 
setUseExponentialBackOff(boolean useExponentialBackOff) { <add> this.useExponentialBackOff = useExponentialBackOff; <add> } <ide> }
Java
mit
f3e34a966f496000c0a63b91c5021d02452de1d1
0
Roxforbraynz/mods,Roxforbraynz/mods
package rox.warpstone.util; public class Octree { //The root node of the tree. private OctreeNode root; public Octree(int xwidth, int yheight, int zdepth, Location origin) { root = new OctreeNode(xwidth, yheight, zdepth, origin); } public void insert(Location loc) { root.insert(loc); } /* * Private class that makes up the tree itself. This is hidden from outside * code, with only what's in the Octree class exposed. */ private class OctreeNode { //Data held by the node private Location data; //This node's parent. Null if root node. private OctreeNode parent; //This node's children. 8 elements long. private OctreeNode children[]; //Dimensions of the bounding box. private int xwidth; private int yheight; private int zdepth; //Only for making a root node public OctreeNode(int xwidth, int yheight, int zdepth, Location origin) { this.xwidth = xwidth; this.yheight = yheight; this.zdepth = zdepth; this.children = new OctreeNode[8]; //Make empty children so that the root node is considered an interior node. for (int i=0; i>8; i++) { this.children[i] = new OctreeNode(this); } } public OctreeNode(OctreeNode parent) { this(null, parent); } public OctreeNode(Location loc, OctreeNode parent) { this.data = loc; this.parent = parent; this.xwidth = parent.xwidth/2; this.yheight = parent.yheight/2; this.zdepth = parent.zdepth/2; this.children = new OctreeNode[8]; } public boolean isLeaf() { //If all children are null, this is a leaf node, else it's an interior node. return (this.children[0] == null && this.children[1] == null && this.children[2] == null && this.children[3] == null && this.children[4] == null && this.children[5] == null && this.children[6] == null && this.children[7] == null)? true : false; } public boolean hasData() { return (this.data != null)? true : false; } public void insert(Location loc) { if (!isLeaf()) { //determine the index to run the insert on. int index = pickIndex(loc); //If the child isn't initialized do so and insert. 
if (this.children[index] == null) { this.children[index] = new OctreeNode(loc, this); } //Else recurse in and insert. else { this.children[index].insert(loc); } } else if (hasData()) { //Back up, make a new interior node, place this as a child of the new interior node, and then insert the loc in the new parent. //Get the index of this node in relation to the parent. int index = parent.pickIndex(this.data); //Create the interior center point for this new interior child. Location inLoc = new Location(0,((index&0x1)==0x1)?parent.data.getX()+(xwidth/2):parent.data.getX()-(xwidth/2), ((index&0x2)==0x2)?parent.data.getY()+(yheight/2):parent.data.getY()-(yheight/2), ((index&0x4)==0x4)?parent.data.getZ()+(zdepth/2):parent.data.getZ()-(zdepth/2)); //Make the new interior child parent.children[index] = new OctreeNode(inLoc, parent); //grab the new node to use as a parent. parent = parent.children[index]; //Attach this node as a new leaf of the new parent. parent.children[parent.pickIndex(this.data)] = this; //Insert a new node on the parent. parent.insert(loc); } else { this.data = loc; } } /** * Picks the children index a given location points toward. * @param loc The location * @return The index */ private int pickIndex(Location loc) { int index = 0; //If the x value of the node is smaller than the x value of the //provided point, set the 1's place bit index = ((this.data.getX() - loc.getX()) >> 31 ) & 0x1; //If the y value of the node is smaller than the y value of the //provided point, set the 2's place bit index = ((((this.data.getY() - loc.getY()) >> 31 ) & 0x1 ) << 1 ) ^ index; //If the z value of the node is smaller than the z value of the //provided point, set the 4's place bit index = ((((this.data.getZ() - loc.getZ()) >> 31 ) & 0x1 ) << 2 ) ^ index; return index; } } }
Warp_Stones/java/rox/warpstone/util/Octree.java
package rox.warpstone.util; public class Octree { //The root node of the tree. private OctreeNode root; private int xmin; private int xmax; private int ymin; private int ymax; private int zmin; private int zmax; public Octree(int xmin, int xmax, int ymin, int ymax, int zmin, int zmax) { this.xmin = xmin; this.xmax = xmax; this.ymin = ymin; this.ymax = ymax; this.zmin = zmin; this.zmax = zmax; root = new OctreeNode(); } public void insert(Location loc) { root.insert(loc); } /* * Private class that makes up the tree itself. This is hidden from outside * code, with only what's in the Octree class exposed. */ private class OctreeNode { //Data held by the node private Location data; //This node's parent. Null if root node. private OctreeNode parent; //This node's children. 8 elements long. private OctreeNode children[]; public OctreeNode() { this.children = new OctreeNode[8]; } public OctreeNode(OctreeNode parent) { this(null, parent); } public OctreeNode(Location loc, OctreeNode parent) { this.data = loc; this.parent = parent; } public boolean isLeaf() { //If all children are null, this is a leaf node, else it's an interior node. return (children[0] == null && children[1] == null && children[2] == null && children[3] == null && children[4] == null && children[5] == null && children[6] == null && children[7] == null)? true : false; } public boolean hasData() { return (data != null)? true : false; } public void insert(Location loc) { if (!isLeaf()) { //determine the index to run the insert on. 
int index = 0; //If the x value of the node is larger than the x value of the //provided point, set the 1's place bit index = (((loc.getX() - data.getX()) >> 31 ) & 0x1 ); //If the y value of the node is larger than the y value of the //provided point, set the 2's place bit index = ((((loc.getY() - data.getY()) >> 31 ) & 0x1 ) << 1 ) ^ index; //If the z value of the node is larger than the z value of the //provided point, set the 4's place bit index = ((((loc.getZ() - data.getZ()) >> 31 ) & 0x1 ) << 2 ) ^ index; //If the child isn't initialized do so and insert. if (children[index] == null) { children[index] = new OctreeNode(loc, this); } //Else recurse in and insert. else { children[index].insert(loc); } } else if (hasData()) { //Back up, make a new interior node, and then keep making interior nodes until both this node and the new coordinates can be placed as leaves. } else { this.data = loc; } } } }
More Octree work. Chopped the unnecessary bits out of the first part of the index picker code, and made it into it's own private helper method.
Warp_Stones/java/rox/warpstone/util/Octree.java
More Octree work. Chopped the unnecessary bits out of the first part of the index picker code, and made it into it's own private helper method.
<ide><path>arp_Stones/java/rox/warpstone/util/Octree.java <ide> <ide> //The root node of the tree. <ide> private OctreeNode root; <del> private int xmin; <del> private int xmax; <del> private int ymin; <del> private int ymax; <del> private int zmin; <del> private int zmax; <ide> <del> public Octree(int xmin, int xmax, int ymin, int ymax, int zmin, int zmax) { <del> this.xmin = xmin; <del> this.xmax = xmax; <del> this.ymin = ymin; <del> this.ymax = ymax; <del> this.zmin = zmin; <del> this.zmax = zmax; <del> <del> root = new OctreeNode(); <add> <add> public Octree(int xwidth, int yheight, int zdepth, Location origin) { <add> root = new OctreeNode(xwidth, yheight, zdepth, origin); <ide> } <ide> <ide> public void insert(Location loc) { <ide> <ide> //Data held by the node <ide> private Location data; <del> <ide> //This node's parent. Null if root node. <ide> private OctreeNode parent; <del> <ide> //This node's children. 8 elements long. <ide> private OctreeNode children[]; <ide> <del> public OctreeNode() { <add> //Dimensions of the bounding box. <add> private int xwidth; <add> private int yheight; <add> private int zdepth; <add> <add> //Only for making a root node <add> public OctreeNode(int xwidth, int yheight, int zdepth, Location origin) { <add> this.xwidth = xwidth; <add> this.yheight = yheight; <add> this.zdepth = zdepth; <ide> this.children = new OctreeNode[8]; <add> <add> //Make empty children so that the root node is considered an interior node. 
<add> for (int i=0; i>8; i++) { <add> this.children[i] = new OctreeNode(this); <add> } <ide> } <ide> <ide> public OctreeNode(OctreeNode parent) { <ide> public OctreeNode(Location loc, OctreeNode parent) { <ide> this.data = loc; <ide> this.parent = parent; <add> this.xwidth = parent.xwidth/2; <add> this.yheight = parent.yheight/2; <add> this.zdepth = parent.zdepth/2; <add> <add> this.children = new OctreeNode[8]; <ide> } <ide> <ide> public boolean isLeaf() { <ide> //If all children are null, this is a leaf node, else it's an interior node. <del> return (children[0] == null <del> && children[1] == null <del> && children[2] == null <del> && children[3] == null <del> && children[4] == null <del> && children[5] == null <del> && children[6] == null <del> && children[7] == null)? true : false; <add> return (this.children[0] == null <add> && this.children[1] == null <add> && this.children[2] == null <add> && this.children[3] == null <add> && this.children[4] == null <add> && this.children[5] == null <add> && this.children[6] == null <add> && this.children[7] == null)? true : false; <ide> } <ide> <ide> public boolean hasData() { <del> return (data != null)? true : false; <add> return (this.data != null)? true : false; <ide> } <ide> <ide> public void insert(Location loc) { <ide> if (!isLeaf()) { <ide> //determine the index to run the insert on. 
<del> int index = 0; <del> //If the x value of the node is larger than the x value of the <del> //provided point, set the 1's place bit <del> index = (((loc.getX() - data.getX()) >> 31 ) & 0x1 ); <del> //If the y value of the node is larger than the y value of the <del> //provided point, set the 2's place bit <del> index = ((((loc.getY() - data.getY()) >> 31 ) & 0x1 ) << 1 ) ^ index; <del> //If the z value of the node is larger than the z value of the <del> //provided point, set the 4's place bit <del> index = ((((loc.getZ() - data.getZ()) >> 31 ) & 0x1 ) << 2 ) ^ index; <add> int index = pickIndex(loc); <ide> <ide> //If the child isn't initialized do so and insert. <del> if (children[index] == null) { <del> children[index] = new OctreeNode(loc, this); <add> if (this.children[index] == null) { <add> this.children[index] = new OctreeNode(loc, this); <ide> } <ide> //Else recurse in and insert. <ide> else { <del> children[index].insert(loc); <add> this.children[index].insert(loc); <ide> } <ide> } <ide> else if (hasData()) { <del> //Back up, make a new interior node, and then keep making interior nodes until both this node and the new coordinates can be placed as leaves. <add> //Back up, make a new interior node, place this as a child of the new interior node, and then insert the loc in the new parent. <add> <add> //Get the index of this node in relation to the parent. <add> int index = parent.pickIndex(this.data); <add> <add> //Create the interior center point for this new interior child. <add> Location inLoc = new Location(0,((index&0x1)==0x1)?parent.data.getX()+(xwidth/2):parent.data.getX()-(xwidth/2), <add> ((index&0x2)==0x2)?parent.data.getY()+(yheight/2):parent.data.getY()-(yheight/2), <add> ((index&0x4)==0x4)?parent.data.getZ()+(zdepth/2):parent.data.getZ()-(zdepth/2)); <add> <add> //Make the new interior child <add> parent.children[index] = new OctreeNode(inLoc, parent); <add> //grab the new node to use as a parent. 
<add> parent = parent.children[index]; <add> //Attach this node as a new leaf of the new parent. <add> parent.children[parent.pickIndex(this.data)] = this; <add> //Insert a new node on the parent. <add> parent.insert(loc); <ide> } <ide> else { <ide> this.data = loc; <ide> } <ide> } <add> <add> /** <add> * Picks the children index a given location points toward. <add> * @param loc The location <add> * @return The index <add> */ <add> private int pickIndex(Location loc) { <add> int index = 0; <add> //If the x value of the node is smaller than the x value of the <add> //provided point, set the 1's place bit <add> index = ((this.data.getX() - loc.getX()) >> 31 ) & 0x1; <add> //If the y value of the node is smaller than the y value of the <add> //provided point, set the 2's place bit <add> index = ((((this.data.getY() - loc.getY()) >> 31 ) & 0x1 ) << 1 ) ^ index; <add> //If the z value of the node is smaller than the z value of the <add> //provided point, set the 4's place bit <add> index = ((((this.data.getZ() - loc.getZ()) >> 31 ) & 0x1 ) << 2 ) ^ index; <add> <add> return index; <add> } <add> <ide> } <ide> <ide> }
Java
apache-2.0
7f6c4e2eda93b881ea9806f12f4ec61e92e2b55d
0
dannyzhou98/Terasology,AWildBeard/Terasology,Felges/Terasology,Malanius/Terasology,CC4401-TeraCity/TeraCity,dimamo5/Terasology,dimamo5/Terasology,leelib/Terasology,CC4401-TeraCity/TeraCity,samuto/Terasology,Josharias/Terasology,flo/Terasology,Nanoware/Terasology,kaen/Terasology,indianajohn/Terasology,indianajohn/Terasology,Nanoware/Terasology,frankpunx/Terasology,MovingBlocks/Terasology,flo/Terasology,Felges/Terasology,kartikey0303/Terasology,dannyzhou98/Terasology,sceptross/Terasology,sceptross/Terasology,Josharias/Terasology,MovingBlocks/Terasology,Vizaxo/Terasology,MarcinSc/Terasology,Vizaxo/Terasology,Nanoware/Terasology,Halamix2/Terasology,Ciclop/Terasology,kaen/Terasology,frankpunx/Terasology,jacklotusho/Terasology,kartikey0303/Terasology,DPirate/Terasology,MovingBlocks/Terasology,DPirate/Terasology,mertserezli/Terasology,leelib/Terasology,Malanius/Terasology,Halamix2/Terasology,immortius/Terasology,samuto/Terasology,Ciclop/Terasology,jacklotusho/Terasology,neshume/Terasology,mertserezli/Terasology,MarcinSc/Terasology,immortius/Terasology,AWildBeard/Terasology,neshume/Terasology
/* * Copyright 2014 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.rendering.nui.layers.ingame.metrics; import com.google.common.collect.Lists; import org.terasology.config.Config; import org.terasology.engine.GameEngine; import org.terasology.engine.Time; import org.terasology.entitySystem.entity.EntityManager; import org.terasology.input.cameraTarget.CameraTargetSystem; import org.terasology.logic.characters.CharacterComponent; import org.terasology.logic.players.LocalPlayer; import org.terasology.math.TeraMath; import org.terasology.math.Vector3i; import org.terasology.monitoring.PerformanceMonitor; import org.terasology.persistence.StorageManager; import org.terasology.registry.CoreRegistry; import org.terasology.registry.In; import org.terasology.rendering.nui.CoreScreenLayer; import org.terasology.rendering.nui.databinding.ReadOnlyBinding; import org.terasology.rendering.nui.widgets.UILabel; import org.terasology.rendering.primitives.ChunkTessellator; import org.terasology.world.WorldProvider; import org.terasology.world.biomes.Biome; import org.terasology.world.biomes.BiomeManager; import javax.vecmath.Vector3f; import java.util.List; import java.util.Locale; /** * @author Immortius */ public class DebugOverlay extends CoreScreenLayer { @In private Config config; @In private GameEngine engine; @In private CameraTargetSystem cameraTarget; @In private Time time; @In private EntityManager entityManager; @In private LocalPlayer 
localPlayer; @In private WorldProvider worldProvider; private List<MetricsMode> metricsModes = Lists.newArrayList(new NullMetricsMode(), new RunningMeansMode(), new SpikesMode(), new AllocationsMode(), new RunningThreadsMode(), new WorldRendererMode(), new NetworkStatsMode()); private int currentMode; private UILabel metricsLabel; @In private StorageManager storageManager; @Override public void initialise() { bindVisible(new ReadOnlyBinding<Boolean>() { @Override public Boolean get() { return config.getSystem().isDebugEnabled(); } }); UILabel debugLine1 = find("debugLine1", UILabel.class); if (debugLine1 != null) { debugLine1.bindText(new TimedBinding<String>(0.5f, new ReadOnlyBinding<String>() { @Override public String get() { double memoryUsage = ((double) Runtime.getRuntime().totalMemory() - (double) Runtime.getRuntime().freeMemory()) / 1048576.0; return String.format("fps: %.2f, mem usage: %.2f MB, total mem: %.2f MB, max mem: %.2f MB", time.getFps(), memoryUsage, Runtime.getRuntime().totalMemory() / 1048576.0, Runtime.getRuntime().maxMemory() / 1048576.0); } })); } UILabel debugLine2 = find("debugLine2", UILabel.class); if (debugLine2 != null) { debugLine2.bindText(new ReadOnlyBinding<String>() { @Override public String get() { return String.format("Active Entities: %s, Current Target: %s", entityManager.getActiveEntityCount(), cameraTarget.toString()); } }); } UILabel debugLine3 = find("debugLine3", UILabel.class); if (debugLine3 != null) { debugLine3.bindText(new ReadOnlyBinding<String>() { @Override public String get() { Vector3f pos = localPlayer.getPosition(); CharacterComponent character = localPlayer.getCharacterEntity().getComponent(CharacterComponent.class); float yaw = (character != null) ? 
character.yaw : 0; Vector3i chunkPos = TeraMath.calcChunkPos((int) pos.x, (int) pos.y, (int) pos.z); return String.format(Locale.US, "Pos (%.2f, %.2f, %.2f), Chunk (%d, %d, %d), Yaw %.2f", pos.x, pos.y, pos.z, chunkPos.x, chunkPos.y, chunkPos.z, yaw); } }); } UILabel debugLine4 = find("debugLine4", UILabel.class); if (debugLine4 != null) { debugLine4.bindText(new ReadOnlyBinding<String>() { @Override public String get() { String biomeId = "unavailable"; Vector3i blockPos = new Vector3i(localPlayer.getPosition()); if (worldProvider.isBlockRelevant(blockPos)) { Biome biome = worldProvider.getBiome(blockPos); biomeId = CoreRegistry.get(BiomeManager.class).getBiomeId(biome); } return String.format("total vus: %s | worldTime: %.3f | biome: %s", ChunkTessellator.getVertexArrayUpdateCount(), worldProvider.getTime().getDays() - 0.0005f, // use floor instead of rounding up biomeId); } }); } UILabel saveStatusLabel = find("saveStatusLabel", UILabel.class); // clients do not have a storage manager if (saveStatusLabel != null && storageManager != null) { saveStatusLabel.bindText(new ReadOnlyBinding<String>() { @Override public String get() { return "Saving... "; } }); saveStatusLabel.bindVisible( new ReadOnlyBinding<Boolean>() { @Override public Boolean get() { return storageManager.isSaving(); } } ); } metricsLabel = find("metrics", UILabel.class); } @Override public void update(float delta) { if (metricsLabel != null) { metricsLabel.setText(metricsModes.get(currentMode).getMetrics()); } } @Override public boolean isModal() { return false; } @Override public boolean isEscapeToCloseAllowed() { return false; } public void toggleMetricsMode() { currentMode = (currentMode + 1) % metricsModes.size(); while (!metricsModes.get(currentMode).isAvailable()) { currentMode = (currentMode + 1) % metricsModes.size(); } PerformanceMonitor.setEnabled(metricsModes.get(currentMode).isPerformanceManagerMode()); } }
engine/src/main/java/org/terasology/rendering/nui/layers/ingame/metrics/DebugOverlay.java
/* * Copyright 2014 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.rendering.nui.layers.ingame.metrics; import com.google.common.collect.Lists; import org.terasology.config.Config; import org.terasology.engine.GameEngine; import org.terasology.engine.Time; import org.terasology.entitySystem.entity.EntityManager; import org.terasology.input.cameraTarget.CameraTargetSystem; import org.terasology.logic.characters.CharacterComponent; import org.terasology.logic.players.LocalPlayer; import org.terasology.math.TeraMath; import org.terasology.math.Vector3i; import org.terasology.monitoring.PerformanceMonitor; import org.terasology.persistence.StorageManager; import org.terasology.registry.CoreRegistry; import org.terasology.registry.In; import org.terasology.rendering.nui.CoreScreenLayer; import org.terasology.rendering.nui.databinding.ReadOnlyBinding; import org.terasology.rendering.nui.widgets.UILabel; import org.terasology.rendering.primitives.ChunkTessellator; import org.terasology.world.WorldProvider; import org.terasology.world.biomes.Biome; import org.terasology.world.biomes.BiomeManager; import javax.vecmath.Vector3f; import java.util.List; import java.util.Locale; /** * @author Immortius */ public class DebugOverlay extends CoreScreenLayer { @In private Config config; @In private GameEngine engine; @In private CameraTargetSystem cameraTarget; @In private Time time; @In private EntityManager entityManager; @In private LocalPlayer 
localPlayer; @In private WorldProvider worldProvider; private List<MetricsMode> metricsModes = Lists.newArrayList(new NullMetricsMode(), new RunningMeansMode(), new SpikesMode(), new AllocationsMode(), new RunningThreadsMode(), new WorldRendererMode(), new NetworkStatsMode()); private int currentMode; private UILabel metricsLabel; @In private StorageManager storageManager; @Override public void initialise() { bindVisible(new ReadOnlyBinding<Boolean>() { @Override public Boolean get() { return config.getSystem().isDebugEnabled(); } }); UILabel debugLine1 = find("debugLine1", UILabel.class); if (debugLine1 != null) { debugLine1.bindText(new TimedBinding<String>(0.5f, new ReadOnlyBinding<String>() { @Override public String get() { double memoryUsage = ((double) Runtime.getRuntime().totalMemory() - (double) Runtime.getRuntime().freeMemory()) / 1048576.0; return String.format("fps: %.2f, mem usage: %.2f MB, total mem: %.2f MB, max mem: %.2f MB", time.getFps(), memoryUsage, Runtime.getRuntime().totalMemory() / 1048576.0, Runtime.getRuntime().maxMemory() / 1048576.0); } })); } UILabel debugLine2 = find("debugLine2", UILabel.class); if (debugLine2 != null) { debugLine2.bindText(new ReadOnlyBinding<String>() { @Override public String get() { return String.format("Active Entities: %s, Current Target: %s", entityManager.getActiveEntityCount(), cameraTarget.toString()); } }); } UILabel debugLine3 = find("debugLine3", UILabel.class); if (debugLine3 != null) { debugLine3.bindText(new ReadOnlyBinding<String>() { @Override public String get() { Vector3f pos = localPlayer.getPosition(); CharacterComponent character = localPlayer.getCharacterEntity().getComponent(CharacterComponent.class); float yaw = (character != null) ? 
character.yaw : 0; Vector3i chunkPos = TeraMath.calcChunkPos((int) pos.x, (int) pos.y, (int) pos.z); return String.format(Locale.US, "Pos (%.2f, %.2f, %.2f), Chunk (%d, %d, %d), Yaw %.2f", pos.x, pos.y, pos.z, chunkPos.x, chunkPos.y, chunkPos.z, yaw); } }); } UILabel debugLine4 = find("debugLine4", UILabel.class); if (debugLine4 != null) { debugLine4.bindText(new ReadOnlyBinding<String>() { @Override public String get() { String biomeId = "unavailable"; Vector3i blockPos = new Vector3i(localPlayer.getPosition()); if (worldProvider.isBlockRelevant(blockPos)) { Biome biome = worldProvider.getBiome(blockPos); biomeId = CoreRegistry.get(BiomeManager.class).getBiomeId(biome); } return String.format("total vus: %s | worldTime: %.3f | biome: %s", ChunkTessellator.getVertexArrayUpdateCount(), worldProvider.getTime().getDays() - 0.0005f, // use floor instead of rounding up biomeId); } }); } UILabel saveStatusLabel = find("saveStatusLabel", UILabel.class); if (saveStatusLabel != null) { saveStatusLabel.bindText(new ReadOnlyBinding<String>() { @Override public String get() { return "Saving... "; } }); saveStatusLabel.bindVisible( new ReadOnlyBinding<Boolean>() { @Override public Boolean get() { return storageManager.isSaving(); } } ); } metricsLabel = find("metrics", UILabel.class); } @Override public void update(float delta) { if (metricsLabel != null) { metricsLabel.setText(metricsModes.get(currentMode).getMetrics()); } } @Override public boolean isModal() { return false; } @Override public boolean isEscapeToCloseAllowed() { return false; } public void toggleMetricsMode() { currentMode = (currentMode + 1) % metricsModes.size(); while (!metricsModes.get(currentMode).isAvailable()) { currentMode = (currentMode + 1) % metricsModes.size(); } PerformanceMonitor.setEnabled(metricsModes.get(currentMode).isPerformanceManagerMode()); } }
Fix NPE in DebugOverlay. Closes #1376
engine/src/main/java/org/terasology/rendering/nui/layers/ingame/metrics/DebugOverlay.java
Fix NPE in DebugOverlay. Closes #1376
<ide><path>ngine/src/main/java/org/terasology/rendering/nui/layers/ingame/metrics/DebugOverlay.java <ide> }); <ide> } <ide> UILabel saveStatusLabel = find("saveStatusLabel", UILabel.class); <del> if (saveStatusLabel != null) { <add> // clients do not have a storage manager <add> if (saveStatusLabel != null && storageManager != null) { <ide> saveStatusLabel.bindText(new ReadOnlyBinding<String>() { <ide> @Override <ide> public String get() {
Java
apache-2.0
ff434d1909f00ed6dd81e3180aee9b447972e6b1
0
apache/incubator-taverna-common-activities,apache/incubator-taverna-common-activities,apache/incubator-taverna-common-activities
package net.sf.taverna.t2.activities.rest; import java.net.URI; import java.net.URLEncoder; import java.security.Principal; import javax.management.remote.JMXPrincipal; import net.sf.taverna.t2.security.credentialmanager.CredentialManager; import net.sf.taverna.t2.security.credentialmanager.UsernamePassword; import org.apache.http.auth.AuthScope; import org.apache.http.auth.Credentials; //import org.apache.http.client.CredentialsProvider; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.log4j.Logger; /** * This CredentialsProvider acts as a mediator between the Apache HttpClient and * Taverna's CredentialManager that stores all user's credentials. * * The only role of it is to retrieve stored details from CredentialManager when * they are required for HTTP authentication. * * @author Sergejs Aleksejevs * @author Alex Nenadic */ public class RESTActivityCredentialsProvider extends BasicCredentialsProvider { private static Logger logger = Logger.getLogger(RESTActivityCredentialsProvider.class); private static final int DEFAULT_HTTP_PORT = 80; private static final int DEFAULT_HTTPS_PORT = 443; private static final String HTTP_PROTOCOL = "http"; private static final String HTTPS_PROTOCOL = "https"; private CredentialManager credentialManager; public RESTActivityCredentialsProvider(CredentialManager credentialManager) { this.credentialManager = credentialManager; } @Override public Credentials getCredentials(AuthScope authscope) { logger.info("Looking for credentials for: Host - " + authscope.getHost() + ";" + "Port - " + authscope.getPort() + ";" + "Realm - " + authscope.getRealm() + ";" + "Authentication scheme - " + authscope.getScheme()); // Ask the superclass first Credentials creds = super.getCredentials(authscope); if (creds != null) { // We have used setCredentials() on this class (for proxy host, // port, username,password) // just before we invoked the http request, which will then pick the // proxy credentials up from here. 
return creds; } // Otherwise, ask Credential Manager if is can provide the credential String AUTHENTICATION_REQUEST_MSG = "This REST service requires authentication in " + authscope.getRealm(); try { UsernamePassword credentials = null; /* * if port is 80 - use HTTP, don't append port if port is 443 - use * HTTPS, don't append port any other port - append port + do 2 * tests: --- test HTTPS first has...() --- if not there, do * get...() for HTTP (which will save the thing) (save both these * entries for HTTP + HTTPS if not there) */ // build the service URI back to front StringBuilder serviceURI = new StringBuilder(); serviceURI.insert(0, "/#" + URLEncoder.encode(authscope.getRealm(), "UTF-16")); if (authscope.getPort() != DEFAULT_HTTP_PORT && authscope.getPort() != DEFAULT_HTTPS_PORT) { // non-default port - add port name to the URI serviceURI.insert(0, ":" + authscope.getPort()); } serviceURI.insert(0, authscope.getHost()); serviceURI.insert(0, "://"); // now the URI is complete, apart from the protocol name if (authscope.getPort() == DEFAULT_HTTP_PORT || authscope.getPort() == DEFAULT_HTTPS_PORT) { // definitely HTTP or HTTPS serviceURI.insert(0, (authscope.getPort() == DEFAULT_HTTP_PORT ? 
HTTP_PROTOCOL : HTTPS_PROTOCOL)); // request credentials from CrendentialManager credentials = credentialManager.getUsernameAndPasswordForService( URI.create(serviceURI.toString()), true, AUTHENTICATION_REQUEST_MSG); } else { // non-default port - will need to try both HTTP and HTTPS; // just check (no pop-up will be shown) if credentials are there // - one protocol that // matched will be used; if if (credentialManager.hasUsernamePasswordForService(URI.create(HTTPS_PROTOCOL + serviceURI.toString()))) { credentials = credentialManager.getUsernameAndPasswordForService( URI.create(HTTPS_PROTOCOL + serviceURI.toString()), true, AUTHENTICATION_REQUEST_MSG); } else if (credentialManager.hasUsernamePasswordForService(URI.create(HTTP_PROTOCOL + serviceURI.toString()))) { credentials = credentialManager.getUsernameAndPasswordForService( URI.create(HTTP_PROTOCOL + serviceURI.toString()), true, AUTHENTICATION_REQUEST_MSG); } else { // non of the two options succeeded, request details with a // popup for HTTP... credentials = credentialManager.getUsernameAndPasswordForService( URI.create(HTTP_PROTOCOL + serviceURI.toString()), true, AUTHENTICATION_REQUEST_MSG); // ...then save a second entry with HTTPS protocol (if the // user has chosen to save the credentials) if (credentials != null && credentials.isShouldSave()) { credentialManager.addUsernameAndPasswordForService(credentials, URI.create(HTTPS_PROTOCOL + serviceURI.toString())); } } } if (credentials != null) { logger.info("Credentials obtained successfully"); return new RESTActivityCredentials(credentials.getUsername(), credentials.getPasswordAsString()); } } catch (Exception e) { logger.error( "Unexpected error while trying to obtain user's credential from CredentialManager", e); } // error or nothing was found logger.info("Credentials not found - the user must have refused to enter them."); return null; } /** * This class encapsulates user's credentials that this CredentialsProvider * can pass to Apache HttpClient. 
* * @author Sergejs Aleksejevs */ public class RESTActivityCredentials implements Credentials { // this seems to be the simplest existing standard implementation of // Principal interface private final JMXPrincipal user; private final String password; public RESTActivityCredentials(String username, String password) { this.user = new JMXPrincipal(username); this.password = password; } public String getPassword() { return password; } public Principal getUserPrincipal() { return user; } } }
src/main/java/net/sf/taverna/t2/activities/rest/RESTActivityCredentialsProvider.java
package net.sf.taverna.t2.activities.rest; import java.net.URI; import java.net.URLEncoder; import java.security.Principal; import javax.management.remote.JMXPrincipal; import net.sf.taverna.t2.security.credentialmanager.CredentialManager; import net.sf.taverna.t2.security.credentialmanager.UsernamePassword; import org.apache.http.auth.AuthScope; import org.apache.http.auth.Credentials; //import org.apache.http.client.CredentialsProvider; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.log4j.Logger; /** * This CredentialsProvider acts as a mediator between the Apache HttpClient and * Taverna's CredentialManager that stores all user's credentials. * * The only role of it is to retrieve stored details from CredentialManager when * they are required for HTTP authentication. * * @author Sergejs Aleksejevs * @author Alex Nenadic */ public class RESTActivityCredentialsProvider extends BasicCredentialsProvider { private static Logger logger = Logger.getLogger(RESTActivityCredentialsProvider.class); private static final int DEFAULT_HTTP_PORT = 80; private static final int DEFAULT_HTTPS_PORT = 443; private static final String HTTP_PROTOCOL = "http"; private static final String HTTPS_PROTOCOL = "https"; private CredentialManager credentialManager; public RESTActivityCredentialsProvider(CredentialManager credentialManager) { this.credentialManager = credentialManager; } @Override public Credentials getCredentials(AuthScope authscope) { logger.info("Looking for credentials for: Host - " + authscope.getHost() + ";" + "Port - " + authscope.getPort() + ";" + "Realm - " + authscope.getRealm() + ";" + "Authentication scheme - " + authscope.getScheme()); // Ask the superclass first Credentials creds = super.getCredentials(authscope); if (creds != null) { // We have used setCredentials() on this class (for proxy host, // port, username,password) // just before we invoked the http request, which will then pick the // proxy credentials up from here. 
return creds; } // Otherwise, ask Credential Manager if is can provide the credential String AUTHENTICATION_REQUEST_MSG = "This REST service requires authentication in " + authscope.getRealm(); try { UsernamePassword credentials = null; /* * if port is 80 - use HTTP, don't append port if port is 443 - use * HTTPS, don't append port any other port - append port + do 2 * tests: --- test HTTPS first has...() --- if not there, do * get...() for HTTP (which will save the thing) (save both these * entries for HTTP + HTTPS if not there) */ // build the service URI back to front StringBuilder serviceURI = new StringBuilder(); serviceURI.insert(0, "/#" + URLEncoder.encode(authscope.getRealm(), "UTF-16")); if (authscope.getPort() != DEFAULT_HTTP_PORT && authscope.getPort() != DEFAULT_HTTPS_PORT) { // non-default port - add port name to the URI serviceURI.insert(0, ":" + authscope.getPort()); } serviceURI.insert(0, authscope.getHost()); serviceURI.insert(0, "://"); // now the URI is complete, apart from the protocol name if (authscope.getPort() == DEFAULT_HTTP_PORT || authscope.getPort() == DEFAULT_HTTPS_PORT) { // definitely HTTP or HTTPS serviceURI.insert(0, (authscope.getPort() == DEFAULT_HTTP_PORT ? 
HTTP_PROTOCOL : HTTPS_PROTOCOL)); // request credentials from CrendentialManager credentials = credentialManager.getUsernameAndPasswordForService( URI.create(serviceURI.toString()), true, AUTHENTICATION_REQUEST_MSG); } else { // non-default port - will need to try both HTTP and HTTPS; // just check (no pop-up will be shown) if credentials are there // - one protocol that // matched will be used; if if (credentialManager.hasUsernamePasswordForService(URI.create(HTTPS_PROTOCOL + serviceURI.toString()))) { credentials = credentialManager.getUsernameAndPasswordForService( URI.create(HTTPS_PROTOCOL + serviceURI.toString()), true, AUTHENTICATION_REQUEST_MSG); } else if (credentialManager.hasUsernamePasswordForService(URI.create(HTTP_PROTOCOL + serviceURI.toString()))) { credentials = credentialManager.getUsernameAndPasswordForService( URI.create(HTTP_PROTOCOL + serviceURI.toString()), true, AUTHENTICATION_REQUEST_MSG); } else { // non of the two options succeeded, request details with a // popup for HTTP... credentials = credentialManager.getUsernameAndPasswordForService( URI.create(HTTP_PROTOCOL + serviceURI.toString()), true, AUTHENTICATION_REQUEST_MSG); // ...then save a second entry with HTTPS protocol (if the // user has chosen to save the credentials) if (credentials != null && credentials.isShouldSave()) { credentialManager.saveUsernameAndPasswordForService(credentials, URI.create(HTTPS_PROTOCOL + serviceURI.toString())); } } } if (credentials != null) { logger.info("Credentials obtained successfully"); return new RESTActivityCredentials(credentials.getUsername(), credentials.getPasswordAsString()); } } catch (Exception e) { logger.error( "Unexpected error while trying to obtain user's credential from CredentialManager", e); } // error or nothing was found logger.info("Credentials not found - the user must have refused to enter them."); return null; } /** * This class encapsulates user's credentials that this CredentialsProvider * can pass to Apache HttpClient. 
* * @author Sergejs Aleksejevs */ public class RESTActivityCredentials implements Credentials { // this seems to be the simplest existing standard implementation of // Principal interface private final JMXPrincipal user; private final String password; public RESTActivityCredentials(String username, String password) { this.user = new JMXPrincipal(username); this.password = password; } public String getPassword() { return password; } public Principal getUserPrincipal() { return user; } } }
Updated for new CredentialManager API. git-svn-id: 44273e3094f099f2cec6dff006641a6f2e757378@13770 bf327186-88b3-11dd-a302-d386e5130c1c
src/main/java/net/sf/taverna/t2/activities/rest/RESTActivityCredentialsProvider.java
Updated for new CredentialManager API.
<ide><path>rc/main/java/net/sf/taverna/t2/activities/rest/RESTActivityCredentialsProvider.java <ide> // ...then save a second entry with HTTPS protocol (if the <ide> // user has chosen to save the credentials) <ide> if (credentials != null && credentials.isShouldSave()) { <del> credentialManager.saveUsernameAndPasswordForService(credentials, <add> credentialManager.addUsernameAndPasswordForService(credentials, <ide> URI.create(HTTPS_PROTOCOL + serviceURI.toString())); <ide> } <ide> }
Java
mit
b77d32e4599d2e1b82aeb1de2c3a279590b58c4b
0
juckele/vivarium,juckele/vivarium,juckele/vivarium
package io.vivarium.experiment; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import io.vivarium.audit.ActionFrequencyFunction; import io.vivarium.audit.AuditFunction; import io.vivarium.audit.CensusFunction; import io.vivarium.core.Blueprint; import io.vivarium.core.Species; import io.vivarium.scripts.CreateWorld; import io.vivarium.scripts.RunSimulation; import io.vivarium.serialization.FileIO; import io.vivarium.serialization.Format; import io.vivarium.util.Functions; import io.vivarium.util.Rand; import io.vivarium.util.concurrency.ThreadRandAllocator; public class MutationRateLocal { private static final int WORLD_SIZE = 100; private static final int LIFE_TIMES_PER_SIMULATION = 1000; private static final int TICKS_PER_SIMULATION = LIFE_TIMES_PER_SIMULATION * 20_000; private static final int MAX_SIMULATIONS = 101; private static final int PEAK_THREAD_THROUGHPUT = 4; private static final int MIN_MUTATION_EXPONENT = -15; private static final int MAX_MUTATION_EXPONENT = -5; /** * Runs the MutationRate experiment. * * Hypothesis: There is an optimal mutation rate for population health. * * @param args * @throws InterruptedException * Not really... 
*/ public static void main(String[] args) throws InterruptedException { // If we're running multi-threaded code, we need to use a multi-threaded random allocator Rand.setAllocator(new ThreadRandAllocator()); // Set up thread pool ExecutorService executorService = Executors.newFixedThreadPool(PEAK_THREAD_THROUGHPUT); Collection<WorldRunner> tasks = new LinkedList<>(); double[] mutationRates = Functions.generateDitherArray(MIN_MUTATION_EXPONENT, MAX_MUTATION_EXPONENT, MAX_SIMULATIONS); for (int i = 0; i < MAX_SIMULATIONS; i++) { double mutationRateExponent = Math.round(mutationRates[i] * 100) / 100.0; // Record the thread name String name = "mutation=2^" + mutationRateExponent; // Make a blueprint Blueprint blueprint = Blueprint.makeDefault(); blueprint.setSize(WORLD_SIZE); // Set species ArrayList<Species> speciesList = new ArrayList<>(); Species species = Species.makeDefault(); species.setMutationRateExponent(mutationRateExponent); species.setNormalizeAfterMutation(Math.sqrt(42)); speciesList.add(species); blueprint.setSpecies(speciesList); // Set audit functions ArrayList<AuditFunction> auditFunctions = new ArrayList<>(); auditFunctions.add(new ActionFrequencyFunction()); auditFunctions.add(new CensusFunction()); blueprint.setAuditFunctions(auditFunctions); // Save the blueprint FileIO.saveSerializer(blueprint, name + "_blueprint.viv", Format.JSON); // Create callable log("Generating blueprint " + name); tasks.add(new WorldRunner(name)); } // Do the work! 
executorService.invokeAll(tasks); log("Awating"); executorService.shutdown(); executorService.awaitTermination(10_000, TimeUnit.DAYS); log("Completed"); } private static class WorldRunner implements Callable<Object> { private final String _name; WorldRunner(String name) { this._name = name; } @Override public Object call() throws Exception { // Create a world { log("Generating world " + _name); String[] args = { "-b", _name + "_blueprint.viv", "-o", _name + "_initial.viv" }; CreateWorld.main(args); } // Run the world { log("Starting simulation of world " + _name); String[] args = { "-i", _name + "_initial.viv", "-o", _name + "_complete.viv", "-t", "" + TICKS_PER_SIMULATION }; RunSimulation.main(args); log("Completeing simulation of world " + _name); } return null; } } private synchronized static void log(String event) { System.out.println(event); } }
vivarium-scripts/src/main/java/io/vivarium/experiment/MutationRateLocal.java
package io.vivarium.experiment; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import io.vivarium.audit.ActionFrequencyFunction; import io.vivarium.audit.AuditFunction; import io.vivarium.audit.CensusFunction; import io.vivarium.core.Blueprint; import io.vivarium.core.Species; import io.vivarium.scripts.CreateWorld; import io.vivarium.scripts.RunSimulation; import io.vivarium.serialization.FileIO; import io.vivarium.serialization.Format; import io.vivarium.util.Functions; import io.vivarium.util.Rand; import io.vivarium.util.concurrency.ThreadRandAllocator; public class MutationRateLocal { private static final int WORLD_SIZE = 100; private static final int LIFE_TIMES_PER_SIMULATION = 100; private static final int TICKS_PER_SIMULATION = LIFE_TIMES_PER_SIMULATION * 20_000; private static final int MAX_SIMULATIONS = 101; private static final int PEAK_THREAD_THROUGHPUT = 4; private static final int MIN_MUTATION_EXPONENT = -15; private static final int MAX_MUTATION_EXPONENT = -5; /** * Runs the MutationRate experiment. * * Hypothesis: There is an optimal mutation rate for population health. * * @param args * @throws InterruptedException * Not really... 
*/ public static void main(String[] args) throws InterruptedException { // If we're running multi-threaded code, we need to use a multi-threaded random allocator Rand.setAllocator(new ThreadRandAllocator()); // Set up thread pool ExecutorService executorService = Executors.newFixedThreadPool(PEAK_THREAD_THROUGHPUT); Collection<WorldRunner> tasks = new LinkedList<>(); double[] mutationRates = Functions.generateDitherArray(MIN_MUTATION_EXPONENT, MAX_MUTATION_EXPONENT, MAX_SIMULATIONS); for (int i = 0; i < MAX_SIMULATIONS; i++) { double mutationRateExponent = Math.round(mutationRates[i] * 100) / 100.0; // Record the thread name String name = "mutation=2^" + mutationRateExponent; // Make a blueprint Blueprint blueprint = Blueprint.makeDefault(); blueprint.setSize(WORLD_SIZE); // Set species ArrayList<Species> speciesList = new ArrayList<>(); Species species = Species.makeDefault(); species.setMutationRateExponent(mutationRateExponent); species.setNormalizeAfterMutation(Math.sqrt(42)); speciesList.add(species); blueprint.setSpecies(speciesList); // Set audit functions ArrayList<AuditFunction> auditFunctions = new ArrayList<>(); auditFunctions.add(new ActionFrequencyFunction()); auditFunctions.add(new CensusFunction()); blueprint.setAuditFunctions(auditFunctions); // Save the blueprint FileIO.saveSerializer(blueprint, name + "_blueprint.viv", Format.JSON); // Create callable log("Generating blueprint " + name); tasks.add(new WorldRunner(name)); } // Do the work! 
executorService.invokeAll(tasks); log("Awating"); executorService.shutdown(); executorService.awaitTermination(10_000, TimeUnit.DAYS); log("Completed"); } private static class WorldRunner implements Callable<Object> { private final String _name; WorldRunner(String name) { this._name = name; } @Override public Object call() throws Exception { // Create a world { log("Generating world " + _name); String[] args = { "-b", _name + "_blueprint.viv", "-o", _name + "_initial.viv" }; CreateWorld.main(args); } // Run the world { log("Starting simulation of world " + _name); String[] args = { "-i", _name + "_initial.viv", "-o", _name + "_complete.viv", "-t", "" + TICKS_PER_SIMULATION }; RunSimulation.main(args); log("Completeing simulation of world " + _name); } return null; } } private synchronized static void log(String event) { System.out.println(event); } }
New run of mutation experiment for 1000 lifetimes
vivarium-scripts/src/main/java/io/vivarium/experiment/MutationRateLocal.java
New run of mutation experiment for 1000 lifetimes
<ide><path>ivarium-scripts/src/main/java/io/vivarium/experiment/MutationRateLocal.java <ide> public class MutationRateLocal <ide> { <ide> private static final int WORLD_SIZE = 100; <del> private static final int LIFE_TIMES_PER_SIMULATION = 100; <add> private static final int LIFE_TIMES_PER_SIMULATION = 1000; <ide> private static final int TICKS_PER_SIMULATION = LIFE_TIMES_PER_SIMULATION * 20_000; <ide> private static final int MAX_SIMULATIONS = 101; <ide> private static final int PEAK_THREAD_THROUGHPUT = 4;
Java
epl-1.0
590ee0a87c7aa99e0ffdc23fc304750a2e732263
0
Charling-Huang/birt,Charling-Huang/birt,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt,sguan-actuate/birt,Charling-Huang/birt,rrimmana/birt-1,rrimmana/birt-1,rrimmana/birt-1,sguan-actuate/birt,rrimmana/birt-1,sguan-actuate/birt,sguan-actuate/birt,Charling-Huang/birt
/******************************************************************************* * Copyright (c) 2004 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.birt.data.engine.impl; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.eclipse.birt.core.data.DataType; import org.eclipse.birt.core.data.ExpressionUtil; import org.eclipse.birt.core.exception.BirtException; import org.eclipse.birt.core.script.ScriptContext; import org.eclipse.birt.data.engine.api.IBaseExpression; import org.eclipse.birt.data.engine.api.IBaseQueryDefinition; import org.eclipse.birt.data.engine.api.IBaseQueryResults; import org.eclipse.birt.data.engine.api.IBinding; import org.eclipse.birt.data.engine.api.IComputedColumn; import org.eclipse.birt.data.engine.api.IConditionalExpression; import org.eclipse.birt.data.engine.api.IFilterDefinition; import org.eclipse.birt.data.engine.api.IGroupDefinition; import org.eclipse.birt.data.engine.api.IQueryDefinition; import org.eclipse.birt.data.engine.api.IResultMetaData; import org.eclipse.birt.data.engine.api.ISortDefinition; import org.eclipse.birt.data.engine.api.querydefn.Binding; import org.eclipse.birt.data.engine.api.querydefn.ComputedColumn; import org.eclipse.birt.data.engine.api.querydefn.ConditionalExpression; import org.eclipse.birt.data.engine.api.querydefn.FilterDefinition; import org.eclipse.birt.data.engine.api.querydefn.ScriptExpression; import 
org.eclipse.birt.data.engine.api.script.IDataSourceInstanceHandle; import org.eclipse.birt.data.engine.core.DataException; import org.eclipse.birt.data.engine.executor.BaseQuery; import org.eclipse.birt.data.engine.executor.JointDataSetQuery; import org.eclipse.birt.data.engine.expression.ExpressionCompilerUtil; import org.eclipse.birt.data.engine.expression.ExpressionProcessor; import org.eclipse.birt.data.engine.i18n.ResourceConstants; import org.eclipse.birt.data.engine.impl.aggregation.AggregateTable; import org.eclipse.birt.data.engine.impl.group.GroupCalculatorFactory; import org.eclipse.birt.data.engine.odi.ICandidateQuery; import org.eclipse.birt.data.engine.odi.IDataSource; import org.eclipse.birt.data.engine.odi.IEventHandler; import org.eclipse.birt.data.engine.odi.IPreparedDSQuery; import org.eclipse.birt.data.engine.odi.IQuery; import org.eclipse.birt.data.engine.odi.IResultClass; import org.eclipse.birt.data.engine.odi.IResultIterator; import org.eclipse.birt.data.engine.odi.IResultObjectEvent; import org.eclipse.birt.data.engine.olap.api.ICubeQueryResults; import org.eclipse.birt.data.engine.olap.script.JSCubeBindingObject; import org.eclipse.birt.data.engine.script.OnFetchScriptHelper; import org.eclipse.birt.data.engine.script.ScriptConstants; import org.mozilla.javascript.Context; import org.mozilla.javascript.Scriptable; import com.ibm.icu.text.Collator; /** * */ public abstract class QueryExecutor implements IQueryExecutor { private IBaseQueryDefinition baseQueryDefn; private AggregateTable aggrTable; // from PreparedQuery->PreparedDataSourceQuery->DataEngineImpl private Scriptable sharedScope; /** Externally provided query scope; can be null */ // from PreparedQuery->PreparedDataSourceQuery private Scriptable parentScope; // for query execution private Scriptable queryScope; private boolean isPrepared = false; private boolean isExecuted = false; private boolean loadFromCache; private Map queryAppContext; /** Query nesting level, 1 - outermost 
query */ private int nestedLevel = 1; /** Runtime data source and data set used by this instance of executor */ protected DataSourceRuntime dataSource; protected DataSetRuntime dataSet; protected IDataSource odiDataSource; protected IQuery odiQuery; /** Outer query's results; null if this query is not nested */ protected IQueryService tabularOuterResults; private IResultIterator odiResult; private IExecutorHelper parentHelper; private DataEngineSession session; protected List temporaryComputedColumns = new ArrayList( ); private static Logger logger = Logger.getLogger( QueryExecutor.class.getName( ) ); /** * @param sharedScope * @param baseQueryDefn * @param aggrTable */ QueryExecutor( Scriptable sharedScope, IBaseQueryDefinition baseQueryDefn, AggregateTable aggrTable, DataEngineSession session ) { Object[] params = { sharedScope, baseQueryDefn, aggrTable, session }; logger.entering( QueryExecutor.class.getName( ), "QueryExecutor", params ); this.sharedScope = sharedScope; this.baseQueryDefn = baseQueryDefn; this.aggrTable = aggrTable; this.session = session; logger.exiting( QueryExecutor.class.getName( ), "QueryExecutor" ); } /** * Provide the actual DataSourceRuntime used for the query. 
* * @return */ abstract protected DataSourceRuntime findDataSource( ) throws DataException; /** * Create a new instance of data set runtime * * @return */ abstract protected DataSetRuntime newDataSetRuntime( ) throws DataException; /** * Create a new unopened odiDataSource given the data source runtime * definition * * @return */ abstract protected IDataSource createOdiDataSource( ) throws DataException; /** * Create an empty instance of odi query * * @return */ abstract protected IQuery createOdiQuery( ) throws DataException; /** * Prepares the ODI query */ protected void prepareOdiQuery( ) throws DataException { } /** * * @throws DataException */ protected void dataSourceBeforeOpen( ) throws DataException { if ( !this.loadFromCache ) { this.dataSource.beforeOpen( ); } } /** * * @throws DataException */ protected void dataSourceAfterOpen( ) throws DataException { if ( !this.loadFromCache ) { this.dataSource.afterOpen( ); } } /** * * @throws DataException */ protected void dataSetBeforeOpen( ) throws DataException { if ( !this.loadFromCache ) { this.dataSet.beforeOpen( ); } } /** * * @throws DataException */ protected void dataSetAfterOpen( ) throws DataException { if ( !this.loadFromCache ) { this.dataSet.afterOpen( ); } } /** * * @throws DataException */ protected void dataSetBeforeClose( ) throws DataException { if ( !this.loadFromCache ) { dataSet.beforeClose( ); } } /** * * @throws DataException */ protected void dataSetAfterClose( ) throws DataException { if ( !this.loadFromCache ) { this.dataSet.afterClose( ); } } /** * Executes the ODI query to reproduce a ODI result set * @param eventHandler * @param stopSign * @return */ abstract protected IResultIterator executeOdiQuery( IEventHandler eventHandler, StopSign stopSign ) throws DataException; /** * @param context */ void setAppContext( Map context ) { queryAppContext = context; } /** * Prepare Executor so that it is ready to execute the query * * @param outerRts * @param targetScope * @throws DataException 
*/ void prepareExecution( IBaseQueryResults outerRts, Scriptable targetScope ) throws DataException { if ( isPrepared ) return; this.parentScope = targetScope; dataSource = findDataSource( ); if ( outerRts != null && ( outerRts instanceof IQueryService || outerRts instanceof ICubeQueryResults )) { if ( outerRts instanceof IQueryService ) { tabularOuterResults = ( (IQueryService) outerRts ); if ( tabularOuterResults.isClosed( ) ) { // Outer result is closed; invalid throw new DataException( ResourceConstants.RESULT_CLOSED ); } this.nestedLevel = tabularOuterResults.getNestedLevel( ); // TODO: check helper is null IExecutorHelper helper = tabularOuterResults.getExecutorHelper( ); this.setParentExecutorHelper( helper ); } else if( outerRts instanceof ICubeQueryResults ) { ExecutorHelper helper = new ExecutorHelper( null ); helper.setScriptable( new JSCubeBindingObject( ( (ICubeQueryResults) outerRts ).getCubeCursor( ) ) ); this.setParentExecutorHelper( helper ); } } // Create the data set runtime // Since data set runtime contains the execution result, a new data set // runtime is needed for each execute dataSet = newDataSetRuntime( ); assert dataSet != null; //For cached data set, we need not execute any scripts. loadFromCache = loadFromCache( ); dataSet.setFromCache( loadFromCache ); openDataSource( ); // Run beforeOpen script now so the script can modify the // DataSetRuntime properties dataSetBeforeOpen( ); // Let subclass create a new and empty intance of the appropriate // odi IQuery odiQuery = createOdiQuery( ); odiQuery.setDistinctValueFlag( dataSet.needDistinctValue( ) ); odiQuery.setQueryDefinition( this.baseQueryDefn ); odiQuery.setExprProcessor( new ExpressionProcessor( dataSet ) ); //Set the row fetch limit for the IQuery instance.The row fetch limit //is the number of rows that a data set can fetch from data source. if( dataSet.getDesign( ) != null ) { //When it is not a subquery, the property "row fetch limit" should be applied //to the query. 
odiQuery.setRowFetchLimit( dataSet.getDesign( ).getRowFetchLimit( ) ); } populateOdiQuery( ); prepareOdiQuery( ); isPrepared = true; } /** * * @return * @throws DataException */ private boolean loadFromCache( ) throws DataException { if( this.dataSource == null ) return false; if ( !( this.baseQueryDefn instanceof IQueryDefinition ) ) return false; return this.session.getDataSetCacheManager( ) .doesLoadFromCache( this.dataSource.getDesign( ), this.dataSet.getDesign( ), new ParameterUtil( this.tabularOuterResults == null ? null : this.tabularOuterResults.getQueryScope( ), this.dataSet, ( IQueryDefinition )this.baseQueryDefn, this.getQueryScope( ), session.getEngineContext( ).getScriptContext( )).resolveDataSetParameters( true ), this.queryAppContext ); } /** * Open the required DataSource. This method should be called after * "dataSource" is initialized by findDataSource() method. * * @throws DataException */ protected void openDataSource( ) throws DataException { assert odiDataSource == null; // Open the underlying data source // dataSource = findDataSource( ); if ( dataSource != null ) { // TODO: potential bug if ( !dataSource.isOpen( ) || session.getDataSetCacheManager( ).needsToCache( )) { // Data source is not open; create an Odi Data Source and open it // We should run the beforeOpen script now to give it a chance to modify // runtime data source properties dataSourceBeforeOpen( ); // Let subclass create a new unopened odi data source odiDataSource = createOdiDataSource( ); // Passes thru the prepared query executor's // context to the new odi data source odiDataSource.setAppContext( queryAppContext ); // Open the odi data source dataSource.openOdiDataSource( odiDataSource ); dataSourceAfterOpen( ); } else { // Use existing odiDataSource created for the data source runtime odiDataSource = dataSource.getOdiDataSource( ); // Passes thru the prepared query executor's // current context to existing data source odiDataSource.setAppContext( queryAppContext ); } } } 
/** * Populates odiQuery with this query's definitions * * @throws DataException */ protected void populateOdiQuery( ) throws DataException { assert odiQuery != null; assert this.baseQueryDefn != null; // Set grouping populateGrouping( session.getEngineContext( ).getScriptContext( ).getContext( ) ); // Set sorting populateSorting( ); // set fetch event populateFetchEvent( session.getEngineContext( ).getScriptContext( ) ); // specify max rows the query should fetch odiQuery.setMaxRows( this.baseQueryDefn.getMaxRows( ) ); prepareCacheQuery( this.odiQuery ); } /** * TODO: enhance me, this is only a temp logic * Set temporary computed columns to DataSourceQuery where cache is used */ protected void prepareCacheQuery( IQuery odiQuery ) { if ( temporaryComputedColumns != null && temporaryComputedColumns.size( ) > 0 ) { if ( odiQuery instanceof org.eclipse.birt.data.engine.executor.dscache.DataSourceQuery ) { ( (org.eclipse.birt.data.engine.executor.dscache.DataSourceQuery) odiQuery ).setTempComputedColumn( this.temporaryComputedColumns ); } else if ( odiQuery instanceof org.eclipse.birt.data.engine.executor.dscache.CandidateQuery ) { ( (org.eclipse.birt.data.engine.executor.dscache.CandidateQuery) odiQuery ).setTempComputedColumn( this.temporaryComputedColumns ); } } } /** * Populate grouping to the query. 
* * @param cx * @throws DataException */ private void populateGrouping( Context cx ) throws DataException { List groups = this.baseQueryDefn.getGroups( ); if ( groups != null && !groups.isEmpty( ) ) { IQuery.GroupSpec[] groupSpecs = new IQuery.GroupSpec[groups.size( )]; Iterator it = groups.iterator( ); for ( int i = 0; it.hasNext( ); i++ ) { IGroupDefinition src = (IGroupDefinition) it.next( ); validateGroupExpression( src ); String expr = getGroupKeyExpression( src ); String groupName = populateGroupName( i, expr ); int dataType = getColumnDataType( cx, expr ); IQuery.GroupSpec dest = QueryExecutorUtil.groupDefnToSpec( cx, src, expr, groupName, -1, dataType, this.baseQueryDefn.getQueryExecutionHints( ) == null ? true : this.baseQueryDefn.getQueryExecutionHints( ) .doSortBeforeGrouping( ) ); groupSpecs[i] = dest; this.temporaryComputedColumns.add( getComputedColumnInstance( cx, groupSpecs[i].getInterval( ), src, expr, groupName, dest, dataType) ); } odiQuery.setGrouping( Arrays.asList( groupSpecs ) ); } } /** * Validating the group expression. * * @param src * @throws DataException */ private void validateGroupExpression( IGroupDefinition src ) throws DataException { if ( ( src.getKeyColumn( ) == null || src.getKeyColumn( ) .trim( ) .length( ) == 0 ) && ( src.getKeyExpression( ) == null || src.getKeyExpression( ) .trim( ) .length( ) == 0 ) ) throw new DataException( ResourceConstants.BAD_GROUP_EXPRESSION ); } /** * Populate the group name according to the given expression. 
* * @param i * @param expr * @return */ private String populateGroupName( int i, String expr ) { String groupName; if ( expr.trim( ).equalsIgnoreCase( "row[0]" ) || expr.trim( ).equalsIgnoreCase( "row._rowPosition" ) || expr.trim( ).equalsIgnoreCase( "dataSetRow[0]" ) || expr.trim( ) .equalsIgnoreCase( "dataSetRow._rowPosition" ) ) { groupName = "_{$TEMP_GROUP_" + i + "ROWID$}_"; } else { groupName = "_{$TEMP_GROUP_" + i + "$}_"; } return groupName; } /** * Get the computed column instance according to the group type.If group has * interval, return GroupComputedColumn, otherwise return normal computed * column. * * @param cx * @param groupSpecs * @param i * @param src * @param expr * @param groupName * @param dest * @return * @throws DataException */ private IComputedColumn getComputedColumnInstance( Context cx, int interval, IGroupDefinition src, String expr, String groupName, IQuery.GroupSpec dest, int dataType) throws DataException { if ( dest.getInterval( ) != IGroupDefinition.NO_INTERVAL ) { return new GroupComputedColumn( groupName, expr, QueryExecutorUtil.getTempComputedColumnType( interval ), GroupCalculatorFactory.getGroupCalculator( src.getInterval( ), src.getIntervalStart( ), src.getIntervalRange( ), dataType) ); } else { return new ComputedColumn( groupName, expr, dataType ); } } /** * Populate the sortings in a query. 
* * @throws DataException */ private void populateSorting( ) throws DataException { List sorts = this.baseQueryDefn.getSorts( ); if ( sorts != null && !sorts.isEmpty( ) ) { IQuery.SortSpec[] sortSpecs = new IQuery.SortSpec[sorts.size( )]; Iterator it = sorts.iterator( ); for ( int i = 0; it.hasNext( ); i++ ) { ISortDefinition src = (ISortDefinition) it.next( ); int sortIndex = -1; String sortKey = src.getColumn( ); if ( sortKey == null ) sortKey = src.getExpression( ).getText( ); else { sortKey = getColumnRefExpression( sortKey ); } temporaryComputedColumns.add( new ComputedColumn( "_{$TEMP_SORT_" + i + "$}_", sortKey, getExpressionDataType( sortKey ) ) ); sortIndex = -1; sortKey = String.valueOf( "_{$TEMP_SORT_" + i + "$}_"); IQuery.SortSpec dest = new IQuery.SortSpec( sortIndex, sortKey, src.getSortDirection( ) == ISortDefinition.SORT_ASC, src.getSortStrength( ) == -1? null:Collator.getInstance( )); sortSpecs[i] = dest; } odiQuery.setOrdering( Arrays.asList( sortSpecs ) ); } } /** * * @param expression * @return * @throws DataException */ private int getExpressionDataType( String expression ) throws DataException { try { if( expression == null ) return DataType.ANY_TYPE; String bindingName = ExpressionUtil.getColumnBindingName( expression ); if( bindingName == null ) return DataType.ANY_TYPE; if ( bindingName.equals( ScriptConstants.ROW_NUM_KEYWORD ) ) return DataType.INTEGER_TYPE; Object binding = this.baseQueryDefn.getBindings( ).get( bindingName ); if( binding == null ) return DataType.ANY_TYPE; int dataType = ( (IBinding) binding ).getDataType( ); if( dataType != DataType.UNKNOWN_TYPE ) return dataType; else return DataType.ANY_TYPE; } catch ( BirtException e ) { throw DataException.wrap( e ); } } /** * * @param cx * @throws DataException */ private void populateFetchEvent( ScriptContext cx ) throws DataException { List dataSetFilters = new ArrayList( ); List queryFilters = new ArrayList( ); List aggrFilters = new ArrayList( ); List dataSetAggrFilters = new 
ArrayList( ); if ( dataSet.getFilters( ) != null ) { Map bindings = createBindingFromComputedColumn( dataSet.getComputedColumns( )); for ( int i = 0; i < dataSet.getFilters( ).size( ); i++ ) { if ( QueryExecutorUtil.isAggrFilter( (IFilterDefinition) dataSet.getFilters( ) .get( i ), bindings ) ) { dataSetAggrFilters.add( dataSet.getFilters( ).get( i ) ); } else { dataSetFilters.add( dataSet.getFilters( ).get( i ) ); } } } if ( this.baseQueryDefn.getFilters( ) != null ) { for ( int i = 0; i < this.baseQueryDefn.getFilters( ).size( ); i++ ) { if ( QueryExecutorUtil.isAggrFilter( (IFilterDefinition) this.baseQueryDefn.getFilters( ) .get( i ), this.baseQueryDefn.getBindings( ) ) ) { aggrFilters.add( this.baseQueryDefn.getFilters( ).get( i ) ); } else { queryFilters.add( this.baseQueryDefn.getFilters( ).get( i ) ); } } } //When prepare filters, the temporaryComputedColumns would also be effect. List multipassFilters = prepareFilters( cx, dataSetFilters, queryFilters, temporaryComputedColumns ); //******************populate the onFetchEvent below**********************/ List computedColumns = null; // set computed column event computedColumns = this.dataSet.getComputedColumns( ); if ( computedColumns == null ) computedColumns = new ArrayList( ); if ( computedColumns.size( ) > 0 || temporaryComputedColumns.size( ) > 0 ) { IResultObjectEvent objectEvent = new ComputedColumnHelper( this.dataSet, computedColumns, temporaryComputedColumns, cx ); odiQuery.addOnFetchEvent( objectEvent ); this.dataSet.getComputedColumns( ) .addAll( temporaryComputedColumns ); } if ( dataSet.getEventHandler( ) != null ) { OnFetchScriptHelper event = new OnFetchScriptHelper( dataSet ); odiQuery.addOnFetchEvent( event ); } if ( dataSetFilters.size( ) + queryFilters.size( ) + multipassFilters.size( ) + aggrFilters.size( ) + dataSetAggrFilters.size( ) > 0 ) { IResultObjectEvent objectEvent = new FilterByRow( dataSetFilters, queryFilters, multipassFilters, aggrFilters, dataSetAggrFilters, dataSet ); 
odiQuery.addOnFetchEvent( objectEvent ); } } /** * * @param computedColumns * @return * @throws DataException */ private Map<String, IBinding> createBindingFromComputedColumn( List computedColumns ) throws DataException { Map<String, IBinding> result = new HashMap<String, IBinding>(); if( computedColumns == null || computedColumns.size( ) == 0 ) return result; for( Object computedColumn: computedColumns ) { IComputedColumn cc = (IComputedColumn)computedColumn; IBinding binding = new Binding( cc.getName( ) ); binding.setExpression( cc.getExpression( ) ); binding.setAggrFunction( cc.getAggregateFunction( ) ); result.put( cc.getName( ), binding ); } return result; } /** * get the data type of a expression * @param cx * @param expr * @return * @throws DataException */ private int getColumnDataType( Context cx, String expr ) throws DataException { String columnName = QueryExecutorUtil.getColInfoFromJSExpr( cx, expr ) .getColumnName( ); if ( columnName == null ) { return DataType.UNKNOWN_TYPE; } if ( columnName.equals( ScriptConstants.ROW_NUM_KEYWORD ) ) { return DataType.INTEGER_TYPE; } Object baseExpr = ( this.baseQueryDefn.getBindings( ).get( columnName ) ); if ( baseExpr == null ) { return DataType.UNKNOWN_TYPE; } int dataType = ( (IBinding) baseExpr ).getExpression( ).getDataType( ); if( dataType == DataType.UNKNOWN_TYPE ) return DataType.ANY_TYPE; return dataType; } /** * @param src * @return */ private String getGroupKeyExpression( IGroupDefinition src ) { String expr = src.getKeyColumn( ); if ( expr == null ) { expr = src.getKeyExpression( ); } else { expr = getColumnRefExpression( expr ); } return expr; } /** * * @param expr * @return */ private String getColumnRefExpression( String expr ) { return ExpressionUtil.createJSRowExpression( expr ); } void setParentExecutorHelper( IExecutorHelper helper ) { this.parentHelper = helper; } /** * * @param cx * @param dataSetFilters * @param queryFilters * @param temporaryComputedColumns * @return * @throws DataException 
*/ private List prepareFilters( ScriptContext cx, List dataSetFilters, List queryFilters, List temporaryComputedColumns ) throws DataException { List result = new ArrayList( ); /*List allFilter = new ArrayList(); allFilter.addAll( dataSetFilters ); allFilter.addAll( queryFilters ); prepareFilter( cx, allFilter, temporaryComputedColumns, result ); */ prepareFilter( cx, dataSetFilters,temporaryComputedColumns, result ); prepareFilter( cx, queryFilters,temporaryComputedColumns, result ); return result; } /** * * @param cx * @param dataSetFilters * @param temporaryComputedColumns * @param result * @throws DataException */ private void prepareFilter( ScriptContext cx, List dataSetFilters, List temporaryComputedColumns, List result ) throws DataException { if ( dataSetFilters != null && !dataSetFilters.isEmpty( ) ) { Iterator it = dataSetFilters.iterator( ); for ( int i = 0; it.hasNext( ); i++ ) { IFilterDefinition src = (IFilterDefinition) it.next( ); IBaseExpression expr = src.getExpression( ); if ( isGroupFilter( src ) ) { ConditionalExpression ce = ( (ConditionalExpression) expr ); String exprText = ce.getExpression( ).getText( ); ColumnInfo columnInfo = QueryExecutorUtil.getColInfoFromJSExpr( cx.getContext( ), exprText ); int index = columnInfo.getColumnIndex( ); String name = columnInfo.getColumnName( ); if ( name == null && index < 0 ) { int currentIndex = result.size( ); // If failed to treate filter key as a column reference // expression // then treat it as a computed column expression temporaryComputedColumns.add( new ComputedColumn( "_{$TEMP_FILTER_" + currentIndex + "$}_", exprText, DataType.ANY_TYPE ) ); it.remove( ); result.add( new FilterDefinition( new ConditionalExpression( new ScriptExpression( String.valueOf( "dataSetRow[\"_{$TEMP_FILTER_" + currentIndex + "$}_\"]" ) ), ce.getOperator( ), ce.getOperand1( ), ce.getOperand2( ) ) ) ); } } } } } /** * * @param filter * @return * @throws DataException */ private boolean isGroupFilter( IFilterDefinition 
filter ) throws DataException { IBaseExpression expr = filter.getExpression( ); if ( expr instanceof IConditionalExpression ) { if ( !ExpressionCompilerUtil.isValidExpressionInQueryFilter( expr, session.getEngineContext( ) .getScriptContext( ) .getContext( ) ) ) throw new DataException( ResourceConstants.INVALID_DEFINITION_IN_FILTER, new Object[]{ ( (IConditionalExpression) expr ).getExpression( ) .getText( ) } ); try { if ( odiQuery instanceof BaseQuery ) { return ( (BaseQuery) odiQuery ).getExprProcessor( ) .hasAggregation( expr ); } } catch ( DataException e ) { return true; } } return false; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getResultMetaData() */ public IResultMetaData getResultMetaData( ) throws DataException { assert odiQuery instanceof IPreparedDSQuery || odiQuery instanceof ICandidateQuery || odiQuery instanceof JointDataSetQuery; if ( odiQuery instanceof IPreparedDSQuery ) { if ( ( (IPreparedDSQuery) odiQuery ).getResultClass( ) != null ) return new ColumnBindingMetaData( baseQueryDefn, ( (IPreparedDSQuery) odiQuery ).getResultClass( ) ); else return null; } else if ( odiQuery instanceof JointDataSetQuery ) { return new ColumnBindingMetaData( baseQueryDefn, ( (JointDataSetQuery) odiQuery ).getResultClass( ) ); } else { return new ColumnBindingMetaData( baseQueryDefn, ( (ICandidateQuery) odiQuery ).getResultClass( ) ); } } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getOdiResultClass() */ public IResultClass getOdiResultClass( ) throws DataException { assert odiQuery instanceof IPreparedDSQuery || odiQuery instanceof ICandidateQuery || odiQuery instanceof JointDataSetQuery; if ( odiQuery instanceof IPreparedDSQuery ) { return ( (IPreparedDSQuery) odiQuery ).getResultClass( ); } else if ( odiQuery instanceof JointDataSetQuery ) { return ( (JointDataSetQuery) odiQuery ).getResultClass( ); } else { return ( (ICandidateQuery) odiQuery ).getResultClass( ); } } /* * @see 
org.eclipse.birt.data.engine.impl.IQueryExecutor#execute() */ public void execute( IEventHandler eventHandler, StopSign stopSign ) throws DataException { logger.logp( Level.FINER, QueryExecutor.class.getName( ), "execute", "Start to execute" ); if ( this.isExecuted ) return; ExecutorHelper helper = new ExecutorHelper( this.parentHelper ); eventHandler.setExecutorHelper( helper ); // Execute the query odiResult = executeOdiQuery( eventHandler, stopSign ); helper.setScriptable( this.dataSet.getJSResultRowObject( ) ); resetComputedColumns( ); // Bind the row object to the odi result set this.dataSet.setResultSet( odiResult, false ); // Calculate aggregate values //this.aggrTable.calculate( odiResult, getQueryScope( ) ); this.isExecuted = true; logger.logp( Level.FINER, QueryExecutor.class.getName( ), "execute", "Finish executing" ); } /** * reset computed columns */ private void resetComputedColumns( ) { List l = this.getDataSet( ).getComputedColumns( ); if ( l != null ) l.removeAll( this.temporaryComputedColumns ); } /* * Closes the executor; release all odi resources * * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#close() */ public void close( ) { if ( odiQuery == null ) { // already closed logger.logp( Level.FINER, QueryExecutor.class.getName( ), "close", "executor closed " ); return; } // Close the data set and associated odi query try { dataSetBeforeClose( ); } catch ( DataException e ) { logger.logp( Level.FINE, QueryExecutor.class.getName( ), "close", e.getMessage( ), e ); } if ( odiResult != null ) { try { odiResult.close( ); } catch ( DataException e1 ) { // TODO Auto-generated catch block e1.printStackTrace( ); } } odiQuery.close( ); try { dataSet.close( ); } catch ( DataException e ) { logger.logp( Level.FINE, QueryExecutor.class.getName( ), "close", e.getMessage( ), e ); } odiQuery = null; odiDataSource = null; odiResult = null; queryScope = null; isPrepared = false; isExecuted = false; // Note: reset dataSet and dataSource only after 
afterClose() is executed, since // the script may access these two objects try { dataSetAfterClose( ); } catch ( DataException e ) { logger.logp( Level.FINE, QueryExecutor.class.getName( ), "close", e.getMessage( ), e ); } dataSet = null; dataSource = null; logger.logp( Level.FINER, QueryExecutor.class.getName( ), "close", "executor closed " ); } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getDataSet() */ public DataSetRuntime getDataSet( ) { return dataSet; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getSharedScope() */ public Scriptable getSharedScope( ) { return this.sharedScope; } /** * Gets the Javascript scope for evaluating expressions for this query * * @return */ public Scriptable getQueryScope( ) { if ( queryScope == null ) { // Set up a query scope. All expressions are evaluated against the // Data set JS object as the prototype (so that it has access to all // data set properties). It uses a subscope of the externally provided // parent scope, or the global shared scope queryScope = newSubScope( parentScope ); queryScope.setPrototype( dataSet.getJSDataSetObject( ) ); } return queryScope; } /** * Creates a subscope within parent scope * @param parentAndProtoScope parent scope. 
If null, the shared top-level scope is used as parent */ private Scriptable newSubScope( Scriptable parentAndProtoScope ) { if ( parentAndProtoScope == null ) parentAndProtoScope = sharedScope; Scriptable scope = session.getEngineContext( ) .getScriptContext( ) .getContext( ) .newObject( parentAndProtoScope ); scope.setParentScope( parentAndProtoScope ); scope.setPrototype( parentAndProtoScope ); return scope; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getNestedLevel() */ public int getNestedLevel( ) { return this.nestedLevel; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getDataSourceInstanceHandle() */ public IDataSourceInstanceHandle getDataSourceInstanceHandle( ) { return this.dataSource; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getJSAggrValueObject() */ public Scriptable getJSAggrValueObject( ) { return this.aggrTable.getJSAggrValueObject( ); } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getNestedDataSets(int) */ public DataSetRuntime[] getNestedDataSets( int nestedCount ) { return tabularOuterResults == null ? null : tabularOuterResults.getDataSetRuntime( nestedCount ); } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getOdiResultSet() */ public IResultIterator getOdiResultSet( ) { return this.odiResult; } /** * @param evaluateValue * @return * @throws DataException */ protected Collection resolveDataSetParameters( boolean evaluateValue ) throws DataException { return new ParameterUtil( this.tabularOuterResults == null ? null:this.tabularOuterResults.getQueryScope( ), this.getDataSet( ), (IQueryDefinition) this.baseQueryDefn, this.getQueryScope( ), session.getEngineContext( ).getScriptContext( )).resolveDataSetParameters( evaluateValue ); } /* * (non-Javadoc) * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getAppContext() */ public Map getAppContext() { return this.queryAppContext; } public DataEngineSession getSession() { return this.session; } }
data/org.eclipse.birt.data/src/org/eclipse/birt/data/engine/impl/QueryExecutor.java
/******************************************************************************* * Copyright (c) 2004 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.birt.data.engine.impl; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.eclipse.birt.core.data.DataType; import org.eclipse.birt.core.data.ExpressionUtil; import org.eclipse.birt.core.exception.BirtException; import org.eclipse.birt.core.script.ScriptContext; import org.eclipse.birt.data.engine.api.IBaseExpression; import org.eclipse.birt.data.engine.api.IBaseQueryDefinition; import org.eclipse.birt.data.engine.api.IBaseQueryResults; import org.eclipse.birt.data.engine.api.IBinding; import org.eclipse.birt.data.engine.api.IComputedColumn; import org.eclipse.birt.data.engine.api.IConditionalExpression; import org.eclipse.birt.data.engine.api.IFilterDefinition; import org.eclipse.birt.data.engine.api.IGroupDefinition; import org.eclipse.birt.data.engine.api.IQueryDefinition; import org.eclipse.birt.data.engine.api.IResultMetaData; import org.eclipse.birt.data.engine.api.ISortDefinition; import org.eclipse.birt.data.engine.api.querydefn.Binding; import org.eclipse.birt.data.engine.api.querydefn.ComputedColumn; import org.eclipse.birt.data.engine.api.querydefn.ConditionalExpression; import org.eclipse.birt.data.engine.api.querydefn.FilterDefinition; import org.eclipse.birt.data.engine.api.querydefn.ScriptExpression; import 
org.eclipse.birt.data.engine.api.script.IDataSourceInstanceHandle; import org.eclipse.birt.data.engine.core.DataException; import org.eclipse.birt.data.engine.executor.BaseQuery; import org.eclipse.birt.data.engine.executor.JointDataSetQuery; import org.eclipse.birt.data.engine.expression.ExpressionCompilerUtil; import org.eclipse.birt.data.engine.expression.ExpressionProcessor; import org.eclipse.birt.data.engine.i18n.ResourceConstants; import org.eclipse.birt.data.engine.impl.aggregation.AggregateTable; import org.eclipse.birt.data.engine.impl.group.GroupCalculatorFactory; import org.eclipse.birt.data.engine.odi.ICandidateQuery; import org.eclipse.birt.data.engine.odi.IDataSource; import org.eclipse.birt.data.engine.odi.IEventHandler; import org.eclipse.birt.data.engine.odi.IPreparedDSQuery; import org.eclipse.birt.data.engine.odi.IQuery; import org.eclipse.birt.data.engine.odi.IResultClass; import org.eclipse.birt.data.engine.odi.IResultIterator; import org.eclipse.birt.data.engine.odi.IResultObjectEvent; import org.eclipse.birt.data.engine.olap.api.ICubeQueryResults; import org.eclipse.birt.data.engine.olap.script.JSCubeBindingObject; import org.eclipse.birt.data.engine.script.OnFetchScriptHelper; import org.eclipse.birt.data.engine.script.ScriptConstants; import org.mozilla.javascript.Context; import org.mozilla.javascript.Scriptable; import com.ibm.icu.text.Collator; /** * */ public abstract class QueryExecutor implements IQueryExecutor { private IBaseQueryDefinition baseQueryDefn; private AggregateTable aggrTable; // from PreparedQuery->PreparedDataSourceQuery->DataEngineImpl private Scriptable sharedScope; /** Externally provided query scope; can be null */ // from PreparedQuery->PreparedDataSourceQuery private Scriptable parentScope; // for query execution private Scriptable queryScope; private boolean isPrepared = false; private boolean isExecuted = false; private boolean loadFromCache; private Map queryAppContext; /** Query nesting level, 1 - outermost 
query */ private int nestedLevel = 1; /** Runtime data source and data set used by this instance of executor */ protected DataSourceRuntime dataSource; protected DataSetRuntime dataSet; protected IDataSource odiDataSource; protected IQuery odiQuery; /** Outer query's results; null if this query is not nested */ protected IQueryService tabularOuterResults; private IResultIterator odiResult; private IExecutorHelper parentHelper; private DataEngineSession session; protected List temporaryComputedColumns = new ArrayList( ); private static Logger logger = Logger.getLogger( QueryExecutor.class.getName( ) ); /** * @param sharedScope * @param baseQueryDefn * @param aggrTable */ QueryExecutor( Scriptable sharedScope, IBaseQueryDefinition baseQueryDefn, AggregateTable aggrTable, DataEngineSession session ) { Object[] params = { sharedScope, baseQueryDefn, aggrTable, session }; logger.entering( QueryExecutor.class.getName( ), "QueryExecutor", params ); this.sharedScope = sharedScope; this.baseQueryDefn = baseQueryDefn; this.aggrTable = aggrTable; this.session = session; logger.exiting( QueryExecutor.class.getName( ), "QueryExecutor" ); } /** * Provide the actual DataSourceRuntime used for the query. 
* * @return */ abstract protected DataSourceRuntime findDataSource( ) throws DataException; /** * Create a new instance of data set runtime * * @return */ abstract protected DataSetRuntime newDataSetRuntime( ) throws DataException; /** * Create a new unopened odiDataSource given the data source runtime * definition * * @return */ abstract protected IDataSource createOdiDataSource( ) throws DataException; /** * Create an empty instance of odi query * * @return */ abstract protected IQuery createOdiQuery( ) throws DataException; /** * Prepares the ODI query */ protected void prepareOdiQuery( ) throws DataException { } /** * * @throws DataException */ protected void dataSourceBeforeOpen( ) throws DataException { if ( !this.loadFromCache ) { this.dataSource.beforeOpen( ); } } /** * * @throws DataException */ protected void dataSourceAfterOpen( ) throws DataException { if ( !this.loadFromCache ) { this.dataSource.afterOpen( ); } } /** * * @throws DataException */ protected void dataSetBeforeOpen( ) throws DataException { if ( !this.loadFromCache ) { this.dataSet.beforeOpen( ); } } /** * * @throws DataException */ protected void dataSetAfterOpen( ) throws DataException { if ( !this.loadFromCache ) { this.dataSet.afterOpen( ); } } /** * * @throws DataException */ protected void dataSetBeforeClose( ) throws DataException { if ( !this.loadFromCache ) { dataSet.beforeClose( ); } } /** * * @throws DataException */ protected void dataSetAfterClose( ) throws DataException { if ( !this.loadFromCache ) { this.dataSet.afterClose( ); } } /** * Executes the ODI query to reproduce a ODI result set * @param eventHandler * @param stopSign * @return */ abstract protected IResultIterator executeOdiQuery( IEventHandler eventHandler, StopSign stopSign ) throws DataException; /** * @param context */ void setAppContext( Map context ) { queryAppContext = context; } /** * Prepare Executor so that it is ready to execute the query * * @param outerRts * @param targetScope * @throws DataException 
*/ void prepareExecution( IBaseQueryResults outerRts, Scriptable targetScope ) throws DataException { if ( isPrepared ) return; this.parentScope = targetScope; dataSource = findDataSource( ); if ( outerRts != null && ( outerRts instanceof IQueryService || outerRts instanceof ICubeQueryResults )) { if ( outerRts instanceof IQueryService ) { tabularOuterResults = ( (IQueryService) outerRts ); if ( tabularOuterResults.isClosed( ) ) { // Outer result is closed; invalid throw new DataException( ResourceConstants.RESULT_CLOSED ); } this.nestedLevel = tabularOuterResults.getNestedLevel( ); // TODO: check helper is null IExecutorHelper helper = tabularOuterResults.getExecutorHelper( ); this.setParentExecutorHelper( helper ); } else if( outerRts instanceof ICubeQueryResults ) { ExecutorHelper helper = new ExecutorHelper( null ); helper.setScriptable( new JSCubeBindingObject( ( (ICubeQueryResults) outerRts ).getCubeCursor( ) ) ); this.setParentExecutorHelper( helper ); } } // Create the data set runtime // Since data set runtime contains the execution result, a new data set // runtime is needed for each execute dataSet = newDataSetRuntime( ); assert dataSet != null; //For cached data set, we need not execute any scripts. loadFromCache = loadFromCache( ); dataSet.setFromCache( loadFromCache ); openDataSource( ); // Run beforeOpen script now so the script can modify the // DataSetRuntime properties dataSetBeforeOpen( ); // Let subclass create a new and empty intance of the appropriate // odi IQuery odiQuery = createOdiQuery( ); odiQuery.setDistinctValueFlag( dataSet.needDistinctValue( ) ); odiQuery.setQueryDefinition( this.baseQueryDefn ); odiQuery.setExprProcessor( new ExpressionProcessor( dataSet ) ); //Set the row fetch limit for the IQuery instance.The row fetch limit //is the number of rows that a data set can fetch from data source. if( dataSet.getDesign( ) != null ) { //When it is not a subquery, the property "row fetch limit" should be applied //to the query. 
odiQuery.setRowFetchLimit( dataSet.getDesign( ).getRowFetchLimit( ) ); } populateOdiQuery( ); prepareOdiQuery( ); isPrepared = true; } /** * * @return * @throws DataException */ private boolean loadFromCache( ) throws DataException { if( this.dataSource == null ) return false; if ( !( this.baseQueryDefn instanceof IQueryDefinition ) ) return false; return this.session.getDataSetCacheManager( ) .doesLoadFromCache( this.dataSource.getDesign( ), this.dataSet.getDesign( ), new ParameterUtil( this.tabularOuterResults == null ? null : this.tabularOuterResults.getQueryScope( ), this.dataSet, ( IQueryDefinition )this.baseQueryDefn, this.getQueryScope( ), session.getEngineContext( ).getScriptContext( )).resolveDataSetParameters( true ), this.queryAppContext ); } /** * Open the required DataSource. This method should be called after * "dataSource" is initialized by findDataSource() method. * * @throws DataException */ protected void openDataSource( ) throws DataException { assert odiDataSource == null; // Open the underlying data source // dataSource = findDataSource( ); if ( dataSource != null ) { // TODO: potential bug if ( !dataSource.isOpen( ) || session.getDataSetCacheManager( ).needsToCache( )) { // Data source is not open; create an Odi Data Source and open it // We should run the beforeOpen script now to give it a chance to modify // runtime data source properties dataSourceBeforeOpen( ); // Let subclass create a new unopened odi data source odiDataSource = createOdiDataSource( ); // Passes thru the prepared query executor's // context to the new odi data source odiDataSource.setAppContext( queryAppContext ); // Open the odi data source dataSource.openOdiDataSource( odiDataSource ); dataSourceAfterOpen( ); } else { // Use existing odiDataSource created for the data source runtime odiDataSource = dataSource.getOdiDataSource( ); // Passes thru the prepared query executor's // current context to existing data source odiDataSource.setAppContext( queryAppContext ); } } } 
/** * Populates odiQuery with this query's definitions * * @throws DataException */ protected void populateOdiQuery( ) throws DataException { assert odiQuery != null; assert this.baseQueryDefn != null; // Set grouping populateGrouping( session.getEngineContext( ).getScriptContext( ).getContext( ) ); // Set sorting populateSorting( ); // set fetch event populateFetchEvent( session.getEngineContext( ).getScriptContext( ) ); // specify max rows the query should fetch odiQuery.setMaxRows( this.baseQueryDefn.getMaxRows( ) ); prepareCacheQuery( this.odiQuery ); } /** * TODO: enhance me, this is only a temp logic * Set temporary computed columns to DataSourceQuery where cache is used */ protected void prepareCacheQuery( IQuery odiQuery ) { if ( temporaryComputedColumns != null && temporaryComputedColumns.size( ) > 0 ) { if ( odiQuery instanceof org.eclipse.birt.data.engine.executor.dscache.DataSourceQuery ) { ( (org.eclipse.birt.data.engine.executor.dscache.DataSourceQuery) odiQuery ).setTempComputedColumn( this.temporaryComputedColumns ); } else if ( odiQuery instanceof org.eclipse.birt.data.engine.executor.dscache.CandidateQuery ) { ( (org.eclipse.birt.data.engine.executor.dscache.CandidateQuery) odiQuery ).setTempComputedColumn( this.temporaryComputedColumns ); } } } /** * Populate grouping to the query. 
* * @param cx * @throws DataException */ private void populateGrouping( Context cx ) throws DataException { List groups = this.baseQueryDefn.getGroups( ); if ( groups != null && !groups.isEmpty( ) ) { IQuery.GroupSpec[] groupSpecs = new IQuery.GroupSpec[groups.size( )]; Iterator it = groups.iterator( ); for ( int i = 0; it.hasNext( ); i++ ) { IGroupDefinition src = (IGroupDefinition) it.next( ); validateGroupExpression( src ); String expr = getGroupKeyExpression( src ); String groupName = populateGroupName( i, expr ); int dataType = getColumnDataType( cx, expr ); IQuery.GroupSpec dest = QueryExecutorUtil.groupDefnToSpec( cx, src, expr, groupName, -1, dataType, this.baseQueryDefn.getQueryExecutionHints( ) == null ? true : this.baseQueryDefn.getQueryExecutionHints( ) .doSortBeforeGrouping( ) ); groupSpecs[i] = dest; this.temporaryComputedColumns.add( getComputedColumnInstance( cx, groupSpecs[i].getInterval( ), src, expr, groupName, dest, dataType) ); } odiQuery.setGrouping( Arrays.asList( groupSpecs ) ); } } /** * Validating the group expression. * * @param src * @throws DataException */ private void validateGroupExpression( IGroupDefinition src ) throws DataException { if ( ( src.getKeyColumn( ) == null || src.getKeyColumn( ) .trim( ) .length( ) == 0 ) && ( src.getKeyExpression( ) == null || src.getKeyExpression( ) .trim( ) .length( ) == 0 ) ) throw new DataException( ResourceConstants.BAD_GROUP_EXPRESSION ); } /** * Populate the group name according to the given expression. 
* * @param i * @param expr * @return */ private String populateGroupName( int i, String expr ) { String groupName; if ( expr.trim( ).equalsIgnoreCase( "row[0]" ) || expr.trim( ).equalsIgnoreCase( "row._rowPosition" ) || expr.trim( ).equalsIgnoreCase( "dataSetRow[0]" ) || expr.trim( ) .equalsIgnoreCase( "dataSetRow._rowPosition" ) ) { groupName = "_{$TEMP_GROUP_" + i + "ROWID$}_"; } else { groupName = "_{$TEMP_GROUP_" + i + "$}_"; } return groupName; } /** * Get the computed column instance according to the group type.If group has * interval, return GroupComputedColumn, otherwise return normal computed * column. * * @param cx * @param groupSpecs * @param i * @param src * @param expr * @param groupName * @param dest * @return * @throws DataException */ private IComputedColumn getComputedColumnInstance( Context cx, int interval, IGroupDefinition src, String expr, String groupName, IQuery.GroupSpec dest, int dataType) throws DataException { if ( dest.getInterval( ) != IGroupDefinition.NO_INTERVAL ) { return new GroupComputedColumn( groupName, expr, QueryExecutorUtil.getTempComputedColumnType( interval ), GroupCalculatorFactory.getGroupCalculator( src.getInterval( ), src.getIntervalStart( ), src.getIntervalRange( ), dataType) ); } else { return new ComputedColumn( groupName, expr, dataType ); } } /** * Populate the sortings in a query. 
* * @throws DataException */ private void populateSorting( ) throws DataException { List sorts = this.baseQueryDefn.getSorts( ); if ( sorts != null && !sorts.isEmpty( ) ) { IQuery.SortSpec[] sortSpecs = new IQuery.SortSpec[sorts.size( )]; Iterator it = sorts.iterator( ); for ( int i = 0; it.hasNext( ); i++ ) { ISortDefinition src = (ISortDefinition) it.next( ); int sortIndex = -1; String sortKey = src.getColumn( ); if ( sortKey == null ) sortKey = src.getExpression( ).getText( ); else { sortKey = getColumnRefExpression( sortKey ); } temporaryComputedColumns.add( new ComputedColumn( "_{$TEMP_SORT_" + i + "$}_", sortKey, getExpressionDataType( sortKey ) ) ); sortIndex = -1; sortKey = String.valueOf( "_{$TEMP_SORT_" + i + "$}_"); IQuery.SortSpec dest = new IQuery.SortSpec( sortIndex, sortKey, src.getSortDirection( ) == ISortDefinition.SORT_ASC, src.getSortStrength( ) == -1? null:Collator.getInstance( )); sortSpecs[i] = dest; } odiQuery.setOrdering( Arrays.asList( sortSpecs ) ); } } /** * * @param expression * @return * @throws DataException */ private int getExpressionDataType( String expression ) throws DataException { try { if( expression == null ) return DataType.ANY_TYPE; String bindingName = ExpressionUtil.getColumnBindingName( expression ); if( bindingName == null ) return DataType.ANY_TYPE; if ( bindingName.equals( ScriptConstants.ROW_NUM_KEYWORD ) ) return DataType.INTEGER_TYPE; Object binding = this.baseQueryDefn.getBindings( ).get( bindingName ); if( binding == null ) return DataType.ANY_TYPE; int dataType = ( (IBinding) binding ).getDataType( ); if( dataType != DataType.UNKNOWN_TYPE ) return dataType; else return DataType.ANY_TYPE; } catch ( BirtException e ) { throw DataException.wrap( e ); } } /** * * @param cx * @throws DataException */ private void populateFetchEvent( ScriptContext cx ) throws DataException { List dataSetFilters = new ArrayList( ); List queryFilters = new ArrayList( ); List aggrFilters = new ArrayList( ); List dataSetAggrFilters = new 
ArrayList( ); if ( dataSet.getFilters( ) != null ) { Map bindings = createBindingFromComputedColumn( dataSet.getComputedColumns( )); for ( int i = 0; i < dataSet.getFilters( ).size( ); i++ ) { if ( QueryExecutorUtil.isAggrFilter( (IFilterDefinition) dataSet.getFilters( ) .get( i ), bindings ) ) { dataSetAggrFilters.add( dataSet.getFilters( ).get( i ) ); } else { dataSetFilters.add( dataSet.getFilters( ).get( i ) ); } } } if ( this.baseQueryDefn.getFilters( ) != null ) { for ( int i = 0; i < this.baseQueryDefn.getFilters( ).size( ); i++ ) { if ( QueryExecutorUtil.isAggrFilter( (IFilterDefinition) this.baseQueryDefn.getFilters( ) .get( i ), this.baseQueryDefn.getBindings( ) ) ) { aggrFilters.add( this.baseQueryDefn.getFilters( ).get( i ) ); } else { queryFilters.add( this.baseQueryDefn.getFilters( ).get( i ) ); } } } //When prepare filters, the temporaryComputedColumns would also be effect. List multipassFilters = prepareFilters( cx, dataSetFilters, queryFilters, temporaryComputedColumns ); //******************populate the onFetchEvent below**********************/ List computedColumns = null; // set computed column event computedColumns = this.dataSet.getComputedColumns( ); if ( computedColumns == null ) computedColumns = new ArrayList( ); if ( computedColumns.size( ) > 0 || temporaryComputedColumns.size( ) > 0 ) { IResultObjectEvent objectEvent = new ComputedColumnHelper( this.dataSet, computedColumns, temporaryComputedColumns, cx ); odiQuery.addOnFetchEvent( objectEvent ); this.dataSet.getComputedColumns( ) .addAll( temporaryComputedColumns ); } if ( dataSet.getEventHandler( ) != null ) { OnFetchScriptHelper event = new OnFetchScriptHelper( dataSet ); odiQuery.addOnFetchEvent( event ); } if ( dataSetFilters.size( ) + queryFilters.size( ) + multipassFilters.size( ) + aggrFilters.size( ) + dataSetAggrFilters.size( ) > 0 ) { IResultObjectEvent objectEvent = new FilterByRow( dataSetFilters, queryFilters, multipassFilters, aggrFilters, dataSetAggrFilters, dataSet ); 
odiQuery.addOnFetchEvent( objectEvent ); } } /** * * @param computedColumns * @return * @throws DataException */ private Map<String, IBinding> createBindingFromComputedColumn( List computedColumns ) throws DataException { Map<String, IBinding> result = new HashMap<String, IBinding>(); if( computedColumns == null || computedColumns.size( ) == 0 ) return result; for( Object computedColumn: computedColumns ) { IComputedColumn cc = (IComputedColumn)computedColumn; IBinding binding = new Binding( cc.getName( ) ); binding.setExpression( cc.getExpression( ) ); binding.setAggrFunction( cc.getAggregateFunction( ) ); result.put( cc.getName( ), binding ); } return result; } /** * get the data type of a expression * @param cx * @param expr * @return * @throws DataException */ private int getColumnDataType( Context cx, String expr ) throws DataException { String columnName = QueryExecutorUtil.getColInfoFromJSExpr( cx, expr ) .getColumnName( ); if ( columnName == null ) { return DataType.UNKNOWN_TYPE; } if ( columnName.equals( ScriptConstants.ROW_NUM_KEYWORD ) ) { return DataType.INTEGER_TYPE; } Object baseExpr = ( this.baseQueryDefn.getBindings( ).get( columnName ) ); if ( baseExpr == null ) { return DataType.UNKNOWN_TYPE; } int dataType = ( (IBinding) baseExpr ).getExpression( ).getDataType( ); if( dataType == DataType.UNKNOWN_TYPE ) return DataType.ANY_TYPE; return dataType; } /** * @param src * @return */ private String getGroupKeyExpression( IGroupDefinition src ) { String expr = src.getKeyColumn( ); if ( expr == null ) { expr = src.getKeyExpression( ); } else { expr = getColumnRefExpression( expr ); } return expr; } /** * * @param expr * @return */ private String getColumnRefExpression( String expr ) { return ExpressionUtil.createJSRowExpression( expr ); } void setParentExecutorHelper( IExecutorHelper helper ) { this.parentHelper = helper; } /** * * @param cx * @param dataSetFilters * @param queryFilters * @param temporaryComputedColumns * @return * @throws DataException 
*/ private List prepareFilters( ScriptContext cx, List dataSetFilters, List queryFilters, List temporaryComputedColumns ) throws DataException { List result = new ArrayList( ); List allFilter = new ArrayList(); allFilter.addAll( dataSetFilters ); allFilter.addAll( queryFilters ); prepareFilter( cx, allFilter, temporaryComputedColumns, result ); return result; } /** * * @param cx * @param dataSetFilters * @param temporaryComputedColumns * @param result * @throws DataException */ private void prepareFilter( ScriptContext cx, List dataSetFilters, List temporaryComputedColumns, List result ) throws DataException { if ( dataSetFilters != null && !dataSetFilters.isEmpty( ) ) { Iterator it = dataSetFilters.iterator( ); for ( int i = 0; it.hasNext( ); i++ ) { IFilterDefinition src = (IFilterDefinition) it.next( ); IBaseExpression expr = src.getExpression( ); if ( isGroupFilter( src ) ) { ConditionalExpression ce = ( (ConditionalExpression) expr ); String exprText = ce.getExpression( ).getText( ); ColumnInfo columnInfo = QueryExecutorUtil.getColInfoFromJSExpr( cx.getContext( ), exprText ); int index = columnInfo.getColumnIndex( ); String name = columnInfo.getColumnName( ); if ( name == null && index < 0 ) { // If failed to treate filter key as a column reference // expression // then treat it as a computed column expression temporaryComputedColumns.add( new ComputedColumn( "_{$TEMP_FILTER_" + i + "$}_", exprText, DataType.ANY_TYPE ) ); it.remove( ); result.add( new FilterDefinition( new ConditionalExpression( new ScriptExpression( String.valueOf( "dataSetRow[\"_{$TEMP_FILTER_" + i + "$}_\"]" ) ), ce.getOperator( ), ce.getOperand1( ), ce.getOperand2( ) ) ) ); } } } } } /** * * @param filter * @return * @throws DataException */ private boolean isGroupFilter( IFilterDefinition filter ) throws DataException { IBaseExpression expr = filter.getExpression( ); if ( expr instanceof IConditionalExpression ) { if ( !ExpressionCompilerUtil.isValidExpressionInQueryFilter( expr, 
session.getEngineContext( ) .getScriptContext( ) .getContext( ) ) ) throw new DataException( ResourceConstants.INVALID_DEFINITION_IN_FILTER, new Object[]{ ( (IConditionalExpression) expr ).getExpression( ) .getText( ) } ); try { if ( odiQuery instanceof BaseQuery ) { return ( (BaseQuery) odiQuery ).getExprProcessor( ) .hasAggregation( expr ); } } catch ( DataException e ) { return true; } } return false; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getResultMetaData() */ public IResultMetaData getResultMetaData( ) throws DataException { assert odiQuery instanceof IPreparedDSQuery || odiQuery instanceof ICandidateQuery || odiQuery instanceof JointDataSetQuery; if ( odiQuery instanceof IPreparedDSQuery ) { if ( ( (IPreparedDSQuery) odiQuery ).getResultClass( ) != null ) return new ColumnBindingMetaData( baseQueryDefn, ( (IPreparedDSQuery) odiQuery ).getResultClass( ) ); else return null; } else if ( odiQuery instanceof JointDataSetQuery ) { return new ColumnBindingMetaData( baseQueryDefn, ( (JointDataSetQuery) odiQuery ).getResultClass( ) ); } else { return new ColumnBindingMetaData( baseQueryDefn, ( (ICandidateQuery) odiQuery ).getResultClass( ) ); } } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getOdiResultClass() */ public IResultClass getOdiResultClass( ) throws DataException { assert odiQuery instanceof IPreparedDSQuery || odiQuery instanceof ICandidateQuery || odiQuery instanceof JointDataSetQuery; if ( odiQuery instanceof IPreparedDSQuery ) { return ( (IPreparedDSQuery) odiQuery ).getResultClass( ); } else if ( odiQuery instanceof JointDataSetQuery ) { return ( (JointDataSetQuery) odiQuery ).getResultClass( ); } else { return ( (ICandidateQuery) odiQuery ).getResultClass( ); } } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#execute() */ public void execute( IEventHandler eventHandler, StopSign stopSign ) throws DataException { logger.logp( Level.FINER, QueryExecutor.class.getName( ), "execute", "Start to execute" ); 
if ( this.isExecuted ) return; ExecutorHelper helper = new ExecutorHelper( this.parentHelper ); eventHandler.setExecutorHelper( helper ); // Execute the query odiResult = executeOdiQuery( eventHandler, stopSign ); helper.setScriptable( this.dataSet.getJSResultRowObject( ) ); resetComputedColumns( ); // Bind the row object to the odi result set this.dataSet.setResultSet( odiResult, false ); // Calculate aggregate values //this.aggrTable.calculate( odiResult, getQueryScope( ) ); this.isExecuted = true; logger.logp( Level.FINER, QueryExecutor.class.getName( ), "execute", "Finish executing" ); } /** * reset computed columns */ private void resetComputedColumns( ) { List l = this.getDataSet( ).getComputedColumns( ); if ( l != null ) l.removeAll( this.temporaryComputedColumns ); } /* * Closes the executor; release all odi resources * * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#close() */ public void close( ) { if ( odiQuery == null ) { // already closed logger.logp( Level.FINER, QueryExecutor.class.getName( ), "close", "executor closed " ); return; } // Close the data set and associated odi query try { dataSetBeforeClose( ); } catch ( DataException e ) { logger.logp( Level.FINE, QueryExecutor.class.getName( ), "close", e.getMessage( ), e ); } if ( odiResult != null ) { try { odiResult.close( ); } catch ( DataException e1 ) { // TODO Auto-generated catch block e1.printStackTrace( ); } } odiQuery.close( ); try { dataSet.close( ); } catch ( DataException e ) { logger.logp( Level.FINE, QueryExecutor.class.getName( ), "close", e.getMessage( ), e ); } odiQuery = null; odiDataSource = null; odiResult = null; queryScope = null; isPrepared = false; isExecuted = false; // Note: reset dataSet and dataSource only after afterClose() is executed, since // the script may access these two objects try { dataSetAfterClose( ); } catch ( DataException e ) { logger.logp( Level.FINE, QueryExecutor.class.getName( ), "close", e.getMessage( ), e ); } dataSet = null; dataSource = 
null; logger.logp( Level.FINER, QueryExecutor.class.getName( ), "close", "executor closed " ); } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getDataSet() */ public DataSetRuntime getDataSet( ) { return dataSet; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getSharedScope() */ public Scriptable getSharedScope( ) { return this.sharedScope; } /** * Gets the Javascript scope for evaluating expressions for this query * * @return */ public Scriptable getQueryScope( ) { if ( queryScope == null ) { // Set up a query scope. All expressions are evaluated against the // Data set JS object as the prototype (so that it has access to all // data set properties). It uses a subscope of the externally provided // parent scope, or the global shared scope queryScope = newSubScope( parentScope ); queryScope.setPrototype( dataSet.getJSDataSetObject( ) ); } return queryScope; } /** * Creates a subscope within parent scope * @param parentAndProtoScope parent scope. If null, the shared top-level scope is used as parent */ private Scriptable newSubScope( Scriptable parentAndProtoScope ) { if ( parentAndProtoScope == null ) parentAndProtoScope = sharedScope; Scriptable scope = session.getEngineContext( ) .getScriptContext( ) .getContext( ) .newObject( parentAndProtoScope ); scope.setParentScope( parentAndProtoScope ); scope.setPrototype( parentAndProtoScope ); return scope; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getNestedLevel() */ public int getNestedLevel( ) { return this.nestedLevel; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getDataSourceInstanceHandle() */ public IDataSourceInstanceHandle getDataSourceInstanceHandle( ) { return this.dataSource; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getJSAggrValueObject() */ public Scriptable getJSAggrValueObject( ) { return this.aggrTable.getJSAggrValueObject( ); } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getNestedDataSets(int) */ public 
DataSetRuntime[] getNestedDataSets( int nestedCount ) { return tabularOuterResults == null ? null : tabularOuterResults.getDataSetRuntime( nestedCount ); } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getOdiResultSet() */ public IResultIterator getOdiResultSet( ) { return this.odiResult; } /** * @param evaluateValue * @return * @throws DataException */ protected Collection resolveDataSetParameters( boolean evaluateValue ) throws DataException { return new ParameterUtil( this.tabularOuterResults == null ? null:this.tabularOuterResults.getQueryScope( ), this.getDataSet( ), (IQueryDefinition) this.baseQueryDefn, this.getQueryScope( ), session.getEngineContext( ).getScriptContext( )).resolveDataSetParameters( evaluateValue ); } /* * (non-Javadoc) * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getAppContext() */ public Map getAppContext() { return this.queryAppContext; } public DataEngineSession getSession() { return this.session; } }
CheckIn:Fix a junit test failure
data/org.eclipse.birt.data/src/org/eclipse/birt/data/engine/impl/QueryExecutor.java
CheckIn:Fix a junit test failure
<ide><path>ata/org.eclipse.birt.data/src/org/eclipse/birt/data/engine/impl/QueryExecutor.java <ide> List queryFilters, List temporaryComputedColumns ) throws DataException <ide> { <ide> List result = new ArrayList( ); <del> List allFilter = new ArrayList(); <add> /*List allFilter = new ArrayList(); <ide> allFilter.addAll( dataSetFilters ); <ide> allFilter.addAll( queryFilters ); <ide> prepareFilter( cx, allFilter, temporaryComputedColumns, result ); <add> */ <add> <add> prepareFilter( cx, dataSetFilters,temporaryComputedColumns, result ); <add> prepareFilter( cx, queryFilters,temporaryComputedColumns, result ); <ide> return result; <ide> } <ide> <ide> <ide> if ( name == null && index < 0 ) <ide> { <add> int currentIndex = result.size( ); <ide> // If failed to treate filter key as a column reference <ide> // expression <ide> // then treat it as a computed column expression <ide> temporaryComputedColumns.add( new ComputedColumn( "_{$TEMP_FILTER_" <del> + i + "$}_", <add> + currentIndex + "$}_", <ide> exprText, <ide> DataType.ANY_TYPE ) ); <ide> it.remove( ); <ide> result.add( new FilterDefinition( new ConditionalExpression( new ScriptExpression( String.valueOf( "dataSetRow[\"_{$TEMP_FILTER_" <del> + i + "$}_\"]" ) ), <add> + currentIndex + "$}_\"]" ) ), <ide> ce.getOperator( ), <ide> ce.getOperand1( ), <ide> ce.getOperand2( ) ) ) );
JavaScript
apache-2.0
c7e67bbec133ca465b1a4c1000b0eae5e16d4a76
0
opentext/storyteller,opentext/storyteller,opentext/storyteller,opentext/storyteller
// Copyright (c) 2017 Open Text. All Rights Reserved. /*jslint bitwise:true */ 'use strict'; const util = require('util'); const streams = require('streams'); const stl = require('stl'); const range = require('range'); const enums = { item: { TABLE: 5, IMAGE: 6, TEXT: 14 }, content: { NULL: 0, HYPERLINK_START: -252, OBJECT_START: -251, PARAGRAPH_BREAK: -244, SUPERSCRIPT_START: -240, SUBSCRIPT_START: -239, HYPERLINK_END: -109, OBJECT_END: -106, CONTENT_END: -64, COLOR_CHANGE: -63, FONT_CHANGE: -62, SUBSCRIPT_END: -58, SUPERSCRIPT_END: -59 }, list: { NONE: 0, BULLETS: 1, NUMBERING: 2 }, numbering: { 0: '1.', // decimal 2: 'A.', // upper-alpha 3: 'a.', // lower-alpha 4: 'R.', // upper-roman 5: 'r.', // lower-roman 6: '1)', 7: 'A)', 8: 'a)', 9: 'R)', 10: 'r)', 15: '(1)', 16: '(A)', 17: '(a)', 18: '(R)', 19: '(r)', }, pen: { SOLID: 0, DASHED: 1, DOTTED: 3 }, valign: { TOP: 0, CENTER: 1, BOTTOM: 2 }, segmentpos: { TOP: 1, RIGHT: 2, BOTTOM: 4, LEFT: 8 }, defaults: { bullets: ['•', '◦', '▪'], numberings: ['1.', '1.', 'r.', '1)'] } }; function getKeyByValue(object, value) { return Object.keys(object).find(key => object[key] === value); } function simple_stack(item) { var items = []; if (item !== undefined) items.push(item); return { push: (item) => items.push(item), pop: () => items.pop(), top: () => items[items.length-1], length: () => items.length }; } ///////////////////////////////////////////////////////////////////////////////////////////////////// // // JSON -> STL // ///////////////////////////////////////////////////////////////////////////////////////////////////// function make_indenter(indent, default_indent) { if (util.isFunction(indent)) { return indent; } if (indent) { if (util.isBoolean(indent)) { indent = default_indent || ' '; } if (util.isNumber(indent)) { indent = ' '.repeat(indent); } if (util.isString(indent)) { return () => indent; } throw new Error("Unsupported indent: " + indent); } return () => ''; } function xml_writer(stream, indenter) { var tags 
= []; var no_children; var content = ''; function format_start(tag, attrs) { attrs = attrs || {}; var result = '<' + tag; var keys = Object.keys(attrs); if (keys.length) { result += ' ' + keys.map(function(key) { return key + '="' + attrs[key] + '"'; }).join(' '); } return result + '>'; } function format_end(tag) { return '</' + tag + '>'; } function flush() { content += cache; cache = ''; } function start(tag, attrs) { var line = format_start(tag, attrs); var indent = indenter(tag, tags, true); if (indent) { line = '\n' + indent.repeat(tags.length) + line; } content += line; no_children = true; tags.push(tag); } function end(tag) { var top = tags.pop(); if (top !== tag) { throw new Error("Tag mismatch (trying to close '" + tag + "' while top element is '" + top + "')"); } if (no_children) { content = content.slice(0, -1) + '/>'; no_children = false; } else { var line = format_end(tag); var indent = indenter(tag, tags, false); if (indent) { line = '\n' + indent.repeat(tags.length) + line; } content += line; } } function text(data) { if (!tags.length) { throw new Error("Cannot write text '" + data + "' outside elements"); } no_children = false; content += data; } function finalize() { stream.write(content); } return { start: start, end: end, text: text, finalize: finalize }; } function stl_writer(stream, indent) { var writer = xml_writer(stream, make_indenter(indent)); function start(tag, attrs) { if (attrs && attrs.style === '') { delete attrs.style; } writer.start('stl:' + tag, attrs); } function end(tag) { writer.end('stl:' + tag); } function text(data) { writer.text(data); } function finalize() { end('stl'); writer.finalize(); writer = null; } var attrs = { 'xmlns:stl': stl.namespaces.stl, version: stl.version }; start('stl', attrs); return { start: start, end: end, text: text, finalize: finalize }; } function css_format(css) { return Object.keys(css).filter(function (key) { return css[key] !== null; }).map(function (key) { return key + ': ' + css[key]; 
}).join('; '); } function css_converter(resolution, options) { function convert_color(col, map_black_as_null) { function hex(d) { return ('0' + (d.toString(16))).slice(-2).toUpperCase(); } if (col.m_eColorModel !== 0) { throw new Error("Unsupported color model: " + col.m_eColorModel); } var r = col.m_lColor & 0xff; var g = (col.m_lColor >> 8) & 0xff; var b = (col.m_lColor >> 16) & 0xff; return (map_black_as_null && !r && !b && !g) ? null : '#' + hex(r) + hex(g) + hex(b); } function convert_length(v) { return v / resolution + 'in'; } function convert_pos(rect, attrs) { attrs = attrs || {}; if (rect.left) { attrs.x = convert_length(rect.left); } if (rect.top) { attrs.y = convert_length(rect.top); } return attrs; } function convert_dim(rect, attrs) { attrs = attrs || {}; if (rect.right) { attrs.w = convert_length(rect.right - rect.left); } if (rect.bottom) { attrs.h = convert_length(rect.bottom - rect.top); } return attrs; } function convert_bbox(rect, attrs) { attrs = attrs || {}; convert_pos(rect, attrs); convert_dim(rect, attrs); return attrs; } function convert_rowbox(row, attrs) { attrs = attrs || {}; attrs.h = convert_length(row.m_iHeight); if (!row.m_bFixedSize) { var css = { '-stl-shape-resize': 'free 0pt max 0pt max' }; attrs.style = css_format(css); } return attrs; } function css_reset() { return { 'color': null, 'font-family': null, 'font-size': null, 'font-weight': null, 'font-style': null, 'text-decoration': null, 'text-align': null, 'margin-left': null, 'margin-right': null, 'margin-top': null, 'margin-bottom': null, 'padding-left': null, 'padding-right': null, 'padding-top': null, 'padding-bottom': null, 'border': null, 'border-top': null, 'border-right': null, 'border-bottom': null, 'border-left': null, 'fill': null, '-stl-list-counter': null, '-stl-list-mask': null, '-stl-list-level': null, '-stl-tabs': null, '-stl-shape-resize': null, '-stl-alignment': null //'vertical-align': null // handled specially (@todo fixit) }; } function convert_parstyle(ps, 
css) { css = css || css_reset(); var alignments = ['left', 'right', 'center', 'justify']; if (ps.iJustification) { css['text-align'] = alignments[ps.iJustification]; } if (ps.iLeftIndent) { css['margin-left'] = convert_length(ps.iLeftIndent); } if (ps.iRightIndent) { css['margin-right'] = convert_length(ps.iRightIndent); } if (ps.iSpaceBefore) { css['margin-top'] = convert_length(ps.iSpaceBefore); } if (ps.iSpaceAfter) { css['margin-bottom'] = convert_length(ps.iSpaceAfter); } var level; var format; switch (ps.iNumbering) { case enums.list.NONE: break; case enums.list.BULLETS: level = ps.iNumberIndent - 1; format = enums.defaults.bullets[level % 3]; css['-stl-list-mask'] = format + '\\9'; css['-stl-list-level'] = level; css['-stl-tabs'] = convert_length(250 * (level + 1)); css['margin-left'] = convert_length(250 * level); break; case enums.list.NUMBERING: level = ps.iNumberIndent - 1; format = enums.numbering[ps.eNumberType]; if (format === undefined) { throw new Error("Unknown eNumberType: " + ps.eNumberType); } css['-stl-list-counter'] = 'default_counter'; css['-stl-list-mask'] = '%' + level + '!' + format + '\\9'; css['-stl-list-level'] = level; css['-stl-tabs'] = convert_length(250 * (level + 1)); css['margin-left'] = convert_length(250 * level); break; default: throw new Error('Unsupported numbering mode: ' + ps.iNumbering); } return css; } function convert_font(name) { return options.fonts ? 
options.fonts(name) : name; } function convert_charstyle(cs, css) { css = css || css_reset(); css['font-family'] = convert_font(cs.strName); css['font-size'] = cs.iFontHeight10X / 10 + 'pt'; if (cs.bBold) { css['font-weight'] = 'bold'; } if (cs.bItalic) { css['font-style'] = 'italic'; } if (cs.bUnderline) { css['text-decoration'] = 'underline'; } if (cs.bStrikeThru) { css['text-decoration'] = 'line-through'; } return css; } function convert_pen(thickness, style, color) { function pen_style(src) { var key = getKeyByValue(enums.pen, src); if (!key) { throw new Error('Unsupported pen style: ' + src); } return key.toLowerCase(); } thickness = thickness ? convert_length(thickness) : '1px'; return thickness + ' ' + pen_style(style) + ' ' + convert_color(color); } function convert_padding(draw, css) { css = css || css_reset(); if (draw.m_iLeftMargin) { css['padding-left'] = convert_length(draw.m_iLeftMargin); } if (draw.m_iRightMargin) { css['padding-right'] = convert_length(draw.m_iRightMargin); } if (draw.m_iTopMargin) { css['padding-top'] = convert_length(draw.m_iTopMargin); } if (draw.m_iBottomMargin) { css['padding-bottom'] = convert_length(draw.m_iBottomMargin); } return css; } function convert_item_style(draw, css) { css = css || css_reset(); if (draw.m_bPen === true) { css.border = convert_pen(draw.m_iPenWidth, draw.m_iPenStyle, draw.m_clrPen); } if (draw.m_bBackGroundTransparent === false) { css.fill = convert_color(draw.m_clrBackGround); } convert_padding(draw, css); if (draw.m_bAutoSizeX || draw.m_bAutoSizeY) { var x = draw.m_bAutoSizeX ? 'max' : '0pt'; var y = draw.m_bAutoSizeY ? 
'max' : '0pt'; css['-stl-shape-resize'] = ['free', x, y, x, y].join(' '); } switch (draw.m_eVertJust) { case undefined: case enums.valign.TOP: css['-stl-alignment'] = null; break; case enums.valign.CENTER: css['-stl-alignment'] = 'vertical 0.5'; break; case enums.valign.BOTTOM: css['-stl-alignment'] = 'vertical 1'; break; default: throw new Error("Unsupported vertical justification: " + draw.m_eVertJust); } return css; } function convert_cell_border(cell, row, column, css) { css = css || css_reset(); if (row.m_iLineAbove !== -1) { css['border-top'] = convert_pen(row.m_iWeightAbove, row.m_iLineAbove, row.m_clrAbove); } if (row.m_iLineBelow !== -1) { css['border-bottom'] = convert_pen(row.m_iWeightBelow, row.m_iLineBelow, row.m_clrBelow); } if (column.m_iLineLeft !== -1) { css['border-left'] = convert_pen(column.m_iWeightLeft, column.m_iLineLeft, column.m_clrLeft); } if (column.m_iLineRight !== -1) { css['border-right'] = convert_pen(column.m_iWeightRight, column.m_iLineRight, column.m_clrRight); } cell.m_FrameSegShape.m_ppSegments.forEach(function (segment) { if (segment.m_estType === 1 && segment.m_bVisible) { var pen = convert_pen(segment.m_iLineWeight, segment.m_iLineStyle, segment.m_clrLine); switch (segment.m_elpPosition) { case enums.segmentpos.TOP: css['border-top'] = pen; break; case enums.segmentpos.RIGHT: css['border-right'] = pen; break; case enums.segmentpos.BOTTOM: css['border-bottom'] = pen; break; case enums.segmentpos.LEFT: css['border-left'] = pen; break; default: // is it a mask? 
throw new Error("Unsupported segment position: " + segment.m_elpPosition); } } }); return css; } return { length: convert_length, pos: convert_pos, dim: convert_dim, bbox: convert_bbox, rowbox: convert_rowbox, color: convert_color, parstyle: convert_parstyle, charstyle: convert_charstyle, item_style: convert_item_style, cell_border: convert_cell_border }; } function content_inserter(writer) { const states = { CLOSED: 0, CACHED: 1, OPEN: 2 }; var style = { state: states.CLOSED, css: {} }; var blackspace = null; var paragraph = null; function padding() { // generate empty span to avoid whitespace trim writer.start('span'); writer.end('span'); } function flush() { if (blackspace === false && paragraph) { padding(); } if (style.state === states.OPEN) { writer.end('span'); } style.state = states.CACHED; } function style_change(css) { var modified = false; Object.keys(css).forEach(function (key) { var value = css[key]; if (style.css[key] !== value) { style.css[key] = value; modified = true; } }); if (modified) { flush(); } } function push(tag, attrs) { flush(); blackspace = null; writer.start(tag, attrs); } function pop(tag) { flush(); writer.end(tag); } function paragraph_start(css) { if (paragraph === true) { throw new Error("Paragraph nesting not supported"); } push('p', {style: css_format(css)}); paragraph = true; } function paragraph_end() { if (paragraph === null) { return; } if (paragraph === false) { throw new Error("Paragraph already closed"); } pop('p'); paragraph = false; } function character(ch) { if (style.state === states.CACHED) { writer.start('span', {style: css_format(style.css)}); style.state = states.OPEN; blackspace = null; } if (/\s/.test(ch)) { if (!blackspace) { padding(); } blackspace = false; } else { blackspace = true; } writer.text(ch); } return { style_change: style_change, paragraph_start: paragraph_start, paragraph_end: paragraph_end, character: character, push: push, pop: pop }; } function build_stl(contents, writer, options) { var 
converter = css_converter(contents.m_lResolution, options); var convert_object; function convert_content(draw, inserter) { inserter = inserter || content_inserter(writer); draw.m_cChars.forEach(function (code, index) { var cmd = draw.m_sXPos[index]; switch (cmd) { case enums.content.HYPERLINK_START: inserter.push('scope', {'hyperlink': draw.m_Links[code].msLink}); inserter.push('story'); break; case enums.content.OBJECT_START: convert_object(draw.m_Objs[code].m_iObjType, draw.m_pObjs[code], inserter); break; case enums.content.PARAGRAPH_BREAK: inserter.paragraph_end(); inserter.paragraph_start(converter.parstyle(draw.m_ParaValues[draw.m_sXPos[index + 1]])); break; case enums.content.SUPERSCRIPT_START: inserter.style_change({'vertical-align': 'super'}); break; case enums.content.SUBSCRIPT_START: inserter.style_change({'vertical-align': 'sub'}); break; case enums.content.HYPERLINK_END: inserter.pop('story'); inserter.pop('scope'); break; case enums.content.OBJECT_END: break; case enums.content.CONTENT_END: inserter.paragraph_end(); break; case enums.content.COLOR_CHANGE: inserter.style_change({'color': converter.color(draw.m_Colors[code], true)}); break; case enums.content.FONT_CHANGE: inserter.style_change(converter.charstyle(draw.m_TextFonts[code])); break; case enums.content.SUBSCRIPT_END: case enums.content.SUPERSCRIPT_END: inserter.style_change({'vertical-align': null}); break; default: if (cmd >= 0 && code > 0) { inserter.character(String.fromCharCode(code)); } break; } }); } function convert_table(draw, inserter) { function convert_row(row, r) { inserter.push('row', converter.rowbox(row)); draw.m_Columns.forEach(function (column, c) { var cell = draw.m_Cells.find(function (cell) { return cell.m_iColumn === c && cell.m_iRow === r; }); var attrs = {}; if (r === 0) { attrs.w = converter.length(column.m_iWidth); } var css = converter.item_style(cell.m_pTextDraw); converter.cell_border(cell, row, column, css); attrs.style = css_format(css); inserter.push('cell', 
attrs); convert_content(cell.m_pTextDraw); inserter.pop('cell'); }); inserter.pop('row'); } // we do not convert table width & height, we convert row/column dimensions instead var attrs = converter.pos(draw.m_rectPosition); var css = converter.item_style(draw); attrs.style = css_format(css); inserter.push('table', attrs); inserter.push('story'); draw.m_Rows.forEach(convert_row); inserter.pop('story'); inserter.pop('table'); } function convert_image(draw, inserter) { var attrs = converter.bbox(draw.m_rectPosition); var uri = 'cas:' + draw.m_pDbBitmap.m_strCASId; attrs.src = options.uris ? options.uris(uri) : uri; inserter.push('image', attrs); inserter.pop('image'); } function convert_text(draw, inserter) { var attrs = converter.bbox(draw.m_rectPosition); var css = converter.item_style(draw); attrs.style = css_format(css); inserter.push('text', attrs); inserter.push('story'); convert_content(draw); inserter.pop('story'); inserter.pop('text'); } convert_object = function (type, draw, inserter) { inserter = inserter || content_inserter(writer); switch (type) { case enums.item.TABLE: convert_table(draw, inserter); break; case enums.item.IMAGE: convert_image(draw, inserter); break; case enums.item.TEXT: convert_text(draw, inserter); break; default: throw new Error("Unsupported object type: " + type); } }; function convert_text_message(contents) { var draw = contents.m_pTextDraw; var attrs = converter.bbox(draw.m_rectPosition); writer.start('story', {name: 'Main', w: attrs.w}); convert_content(draw); writer.end('story'); if (options.page) { writer.start('page', attrs); var css = converter.item_style(draw); attrs.style = css_format(css); attrs.story = 'Main'; writer.start('text', attrs); writer.end('text'); writer.end('page'); } } function convert_canvas_message(contents) { var attrs = { w: converter.length(contents.m_lWidth), h: converter.length(contents.m_lHeight) }; writer.start('page', attrs); contents.m_DrawFront.forEach(function (obj) { 
convert_object(obj.m_eComponentType, obj.m_pDrawObj); }); writer.end('page'); } writer.start('document'); if (contents.m_bTextOnly) { convert_text_message(contents); } else { convert_canvas_message(contents); } writer.end('document'); } ///////////////////////////////////////////////////////////////////////////////////////////////////// // // STL -> JSON // ///////////////////////////////////////////////////////////////////////////////////////////////////// function css2rgb(input) { var m = input.match(/^#([0-9a-f]{3})$/i); if(m) { // in three-character format, each value is multiplied by 0x11 to give an // even scale from 0x00 to 0xff return [ parseInt(m[1].charAt(0),16)*0x11, parseInt(m[1].charAt(1),16)*0x11, parseInt(m[1].charAt(2),16)*0x11 ]; } m = input.match(/^#([0-9a-f]{6})$/i); if(m) { return [ parseInt(m[1].substr(0,2),16), parseInt(m[1].substr(2,2),16), parseInt(m[1].substr(4,2),16) ]; } m = input.match(/^rgb\s*\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$/i); if(m) { return [m[1],m[2],m[3]]; } // @todo: color names, rgba, hsl, ... 
throw new Error("Unsupported color format: " + input); } function json_factory(options) { var id = 0; var factory = initialize(); var resolution = options.resolution || 1000; var multipliers = { 'pt': 1, 'px': 72/96, 'in': 72, 'pc': 72/6, 'mm': 72/25.4, 'cm': 72/2.54 }; function convert_length(len, scale) { scale = scale || resolution/72; var matches = /([0-9\.]+)(pt|px|in|pc|mm|cm|em|%)/.exec(len); if (!matches) throw new Error("Invalid length: " + len); var multiplier = multipliers[matches[2]]; if (!multiplier) throw new Error("Unsupported unit: " + matches[2]); return Math.round(parseFloat(matches[1]) * multiplier * scale); } function convert_bbox(attrs) { var x = convert_length(attrs.x || '0in'); var y = convert_length(attrs.y || '0in'); var w = convert_length(attrs.w || '1in'); var h = convert_length(attrs.h || '1in'); return { left: x, right: x+w, top: y, bottom: y+h }; } function is_autosize(css) { var result = [false, false]; if (css['-stl-shape-resize']) { var mode = css['-stl-shape-resize'].split(' '); if (mode[0] === 'free') { result[0] = (mode.length === 1) || (mode[1] === 'max'); result[1] = (mode.length === 1) || (mode[2] === 'max'); } } return result; } function initialize() { if (!json_factory.cache) { var factory = require('empower.json').factory; var instance = {}; Object.keys(factory).forEach(function(key) { var src = null; instance[key] = function clone() { if (src == null) { src = JSON.stringify(factory[key]); } return JSON.parse(src); }; }); json_factory.cache = instance; } return json_factory.cache; } function color(css_color) { var rgb = css2rgb(css_color || '#000'); var c = factory.color(); c.m_lColor = rgb[0] | (rgb[1] << 8) | (rgb[2] << 16); return c; } function font(css) { css = css || {}; var f = factory.font(); var family = css['font-family'] || "Lato"; if (options.fonts) family = options.fonts(family); f.clrFontColor = color(); f.strName = family; f.iFontHeight10X = convert_length(css['font-size'] || '10pt', 10); f.bBold = 
css['font-weight'] === 'bold'; f.bItalic = css['font-style'] === 'italic'; f.bUnderline = css['text-decoration'] === 'underline'; f.bStrikeThru = css['text-decoration'] === 'line-through'; return f; } function paragraph(css) { function convert_prop(property, par, key) { if (property) { par[key] = convert_length(property); } } function get_numbering_type(level, mask) { if (mask) { var match = /%\d!(.+)\\9/.exec(mask); var mask = match ? match[1] : enums.defaults.numberings[level]; var type = getKeyByValue(enums.numbering, mask); if (type) { return +type; } } throw new Error("XXX Unsupported numbering mask: " + mask); } var p = factory.paragraph(); p.iDefaultTab = resolution/4; p.iBulletFont = -1; var alignments = ['left', 'right', 'center', 'justify']; var align = alignments.indexOf(css['text-align']); if (align !== -1) p.iJustification = align; convert_prop(css['margin-left'], p, 'iLeftIndent'); convert_prop(css['margin-right'], p, 'iRightIndent'); convert_prop(css['margin-top'], p, 'iSpaceBefore'); convert_prop(css['margin-bottom'], p, 'iSpaceAfter'); if (css['-stl-list-level']) { var level = parseInt(css['-stl-list-level']); p.iNumberIndent = level + 1; p.iLeftIndent += p.iDefaultTab; p.bUserSetType = false; p.iNumberColor = 0; p.eUserSetNumber = 0; if (css['-stl-list-counter']) { p.iNumbering = enums.list.NUMBERING; p.bUserSetColor = false; p.eNumberType = get_numbering_type(level, css['-stl-list-mask']); } else { p.iNumbering = enums.list.BULLETS; p.iBulletFont = 2; p.pszNumberString = 168; } } else { p.iNumbering = enums.list.NONE; } Object.keys(p).forEach(function (key) { if (p[key] === null) { delete p[key]; } }); return p; } function link(attrs) { var l = factory.link(); l.msLink = attrs.hyperlink; return l; } function objref(type) { var r = factory.objref(); r.m_iObjType = type; return r; } function textprops() { return factory.textprops(); } function tableprops() { var p = factory.tableprops(); p.m_eEditChangeType = 1; p.m_bCanChangeFormat = true; 
p.m_bCanType = true; p.m_eTextField = 3; p.m_ePromptType = 1; p.m_FormFieldInfoLocal.m_clrLine = color(); p.m_FormFieldInfoLocal.m_clrFill = color('#c4c4c4'); p.m_iTabOrderID = 3; return p; } function columnprops() { var p = factory.tableprops(); p.m_eEditChangeType = 0; p.m_bCanChangeFormat = false; p.m_bCanType = false; p.m_eTextField = 0; p.m_ePromptType = 0; p.m_FormFieldInfoLocal.m_clrLine = color(); p.m_FormFieldInfoLocal.m_clrFill = color('#c4c4c4'); p.m_iTabOrderID = 0; return p; } function rowprops() { return columnprops(); } function image(attrs) { id += 2; var img = factory.image(); var uri = options.uris ? options.uris(attrs.src) : attrs.src; var casid = uri.replace(/^(cas:)/,''); var draw = img.m_pDrawObj; draw.m_oiID = id-1; draw.m_UNITSPERINCH = resolution; draw.m_pDbBitmap.m_oiDB = id-2; draw.m_pDbBitmap.m_strCASId = casid; draw.m_rectPosition = convert_bbox(attrs); return img; } function convert_pen(border) { var parts = border.split(' '); var style = enums.pen[parts[1].toUpperCase()]; if (style === undefined) { throw new Error("Unsupported pen style: " + parts[1]); } var result = { style: style, color: color(parts[2]) }; if (parts[0] !== '1px') { // handle thickness device dependent specially (@todo fixme) result.thickness = convert_length(parts[0]); } return result; } function apply_item_style(draw, css) { function convert_alignment(alignment) { if (!alignment) { return enums.valign.TOP; } var parts = alignment.split(' '); if (parts[0] === 'vertical') { switch (+(parts[1])) { case 0: return enums.valign.TOP; case 0.5: return enums.valign.CENTER; case 1: return enums.valign.BOTTOM; } } throw new Error("Unsupported alignment: ", alignment); } if (css.border) { var pen = convert_pen(css.border); draw.m_iPenWidth = pen.thickness; draw.m_iPenStyle = pen.style; draw.m_clrPen = pen.color; draw.m_bPen = true; } if (css.fill) { draw.m_clrBackGround = color(css.fill); draw.m_bBackGroundTransparent = false; } if (css['padding-left']) { draw.m_iLeftMargin = 
convert_length(css['padding-left']); } if (css['padding-right']) { draw.m_iRightMargin = convert_length(css['padding-right']); } if (css['padding-top']) { draw.m_iTopMargin = convert_length(css['padding-top']); } if (css['padding-bottom']) { draw.m_iBottomMargin = convert_length(css['padding-bottom']); } var as = is_autosize(css); draw.m_bAutoSizeX = as[0]; draw.m_bAutoSizeY = as[1]; draw.m_eVertJust = convert_alignment(css['-stl-alignment']); } function apply_cell_borders(shape, css) { function convert_edge(border, pos) { var edge = factory.cell_edge(); edge.m_elpPosition = pos; if (border) { var pen = convert_pen(border); edge.m_iLineWeight = pen.thickness; edge.m_iLineStyle = pen.style; edge.m_clrLine = pen.color; edge.m_bVisible = true; } else { edge.m_iLineWeight = 0; edge.m_iLineStyle = enums.pen.SOLID; edge.m_clrLine = color(); edge.m_bVisible = false; } return edge; } function convert_corner(pos) { var corner = factory.cell_corner(); corner.m_iLineStyle = enums.pen.SOLID; corner.m_iLineWeight = 0; corner.m_clrLine = color(); corner.m_ecpCorner = 2; return corner; } var segments = shape.m_ppSegments; segments.push(convert_corner(enums.segmentpos.TOP)); segments.push(convert_edge(css['border-top'], enums.segmentpos.TOP)); segments.push(convert_corner(enums.segmentpos.RIGHT)); segments.push(convert_edge(css['border-right'], enums.segmentpos.RIGHT)); segments.push(convert_corner(enums.segmentpos.BOTTOM)); segments.push(convert_edge(css['border-bottom'], enums.segmentpos.BOTTOM)); segments.push(convert_corner(enums.segmentpos.LEFT)); segments.push(convert_edge(css['border-left'], enums.segmentpos.LEFT)); } function text(attrs, css) { id += 1; var txt = factory.text(); var draw = txt.m_pDrawObj; draw.m_oiID = id-1; draw.m_rectPosition = convert_bbox(attrs); draw.m_pEditableProps = textprops(); draw.m_UNITSPERINCH = resolution, draw.m_iLogicalRes = resolution, draw.m_iDesignRes = resolution, draw.m_clrPen = color(); draw.m_iMaxWidthDes = (draw.m_rectPosition.right 
- draw.m_rectPosition.left); draw.m_Colors.push(color()); draw.m_Colors.push(color('#00ffc0')); draw.m_Colors.push(color('#f00')); apply_item_style(draw, css); return txt; } function column(attrs) { var col = factory.column(); col.m_iWidth = convert_length(attrs.w); col.m_clrLeft = color(); col.m_clrRight = color(); col.m_pEditableProps = columnprops(); return col; } function row(attrs, css) { var row = factory.row(); row.m_iHeight = convert_length(attrs.h); row.m_clrAbove = color(); row.m_clrBelow = color(); row.m_colorLegend = color(); row.m_pEditableProps = rowprops(); var as = is_autosize(css); row.m_bFixedSize = !(as[0] || as[1]); return row; } function cell(css, c, r, width, height) { id += 1; var draw = factory.text().m_pDrawObj; draw.m_oiID = id-1; draw.m_bAutoSizeX = false; draw.m_bAutoSizeY = false; draw.m_rectPosition.left = 0; draw.m_rectPosition.top = 0; draw.m_rectPosition.right = width; draw.m_rectPosition.bottom = height; draw.m_pEditableProps = textprops(); draw.m_UNITSPERINCH = resolution, draw.m_iLogicalRes = resolution, draw.m_iDesignRes = resolution, draw.m_clrPen = color(); draw.m_iMaxWidthDes = width; draw.m_Colors.push(color()); draw.m_Colors.push(color('#00ffc0')); draw.m_Colors.push(color('#f00')); apply_item_style(draw, css); var cell = factory.cell(); cell.m_pTextDraw = draw; cell.m_iColumn = c; cell.m_iRow = r; apply_cell_borders(cell.m_FrameSegShape, css); return cell; } function table(attrs, css) { id += 1; var tbl = factory.table(); var draw = tbl.m_pDrawObj; draw.m_oiID = id; draw.m_rectPosition = convert_bbox(attrs); draw.m_UNITSPERINCH = resolution; draw.m_clrPen = color(); draw.m_clrBrushFill = color('#00c0c0'); draw.m_clrShadow = color('#00c0c0'); draw.m_pEditableProps = tableprops(); draw.m_colorLegendFrame = color(); apply_item_style(draw, css); return tbl; } function canvas(root, template_id, attrs) { const width = convert_length(attrs.w); const height = convert_length(attrs.h); root.m_ePageType = 1; root.m_Size.width = 
width; root.m_Size.height = height; root.m_scopedMessageTemplate = template_id; delete root.m_oi; delete root.m_scopedMessageType; delete root.m_bCanSplitText; delete root.m_iWidowOrphan; delete root.m_bRenumberText; delete root.m_lBottomFlowMargin; delete root.m_lTopFlowMargin ; root.contents = factory.canvas(); root.contents.m_lResolution = resolution; root.contents.m_lWidth = width; root.contents.m_lHeight = height; root.contents.m_lGrowMaxY = height; root.contents.m_lTopMargin = 0; root.contents.m_lBottomMargin = 0; return root.contents; } function content(root, template_id, attrs) { const css = { '-stl-shape-resize': 'free 0pt max 0pt max', }; root.m_oi = 0; root.m_ePageType = 0; root.m_scopedMessageType = template_id; delete root.m_Size; delete root.m_scopedMessageTemplate; root.contents = factory.content(); root.contents.m_lResolution = resolution; root.contents.m_pTextDraw = text(attrs, css).m_pDrawObj; root.rule = null; return root.contents; } function root() { var r = factory.root(); r.m_iDesignResolution = resolution; return r; } return { color: color, font: font, paragraph: paragraph, link: link, objref: objref, image: image, text: text, table: table, row: row, column: column, cell: cell, content: content, canvas: canvas, root: root }; } function json_builder(nsmap, factory, root, options) { const unsupported = function (item) { var message = "Unsupported " + item; if (options.permissive) { console.error(message + " (ignored)"); return ignorant(); } throw new Error(message); }; const unexpected = function(tag, what) { var message = "Unexpected " + text + " inside " + tag; if (options.permissive) { console.error(message + " (ignored)"); } throw new Error(message); } function unexpected_text(data) { if (data.trim()) unexpected("stl:stl", "text"); } function clone_css(css) { return JSON.parse(JSON.stringify(css)); } function split_css(style, css) { css = css || {}; if (style) { style.trim().split(';').forEach(function(property) { var parts = 
property.trim().split(':'); if (parts.length === 2) { css[parts[0].trim()] = parts[1].trim(); } else if (parts[0].length) { throw new Error("Invalid CSS property: "+parts[0]); } }); } return css; } function table_builder(draw) { var columns = []; var rows = []; var cells = []; var column = 0; function row_(start, attrs) { if (start) { column = 0; var css = split_css(attrs.style); rows.push(factory.row(attrs, css)); } } function cell_(start, attrs) { if (start) { if (rows.length === 1) { columns.push(factory.column(attrs)); } var row = rows.length - 1; var css = split_css(attrs.style); var cell = factory.cell(css, column, row, columns[column].m_iWidth, rows[row].m_iHeight); cells.push(cell); return stl.handler_dispatcher(nsmap, story_builder(cell.m_pTextDraw)); } else { column += 1; } } function finalize() { draw.m_Rows = rows; draw.m_Columns = columns; draw.m_Cells = cells; // we override table w,h with a sum of column widths and row heights var width = columns.reduce((acc,el) => acc+el.m_iWidth, 0);; var height = rows.reduce((acc,el) => acc+el.m_iHeight, 0); draw.m_rectPosition.right = width; draw.m_rectPosition.bottom = height; } return { story_: () => {}, row_: row_, cell_: cell_, repeater_: () => unsupported("stl:repeater"), text: unexpected_text, finalize: finalize, }; } function story_builder(draw) { var paragraphs = draw.m_ParaValues; var colors = draw.m_Colors; var fonts = draw.m_TextFonts; var chars = draw.m_cChars; var commands = draw.m_sXPos; var objrefs = draw.m_Objs; var objs = draw.m_pObjs; var links = draw.m_Links; var styles = simple_stack({}); styles.dirty = true; var inside = {}; function insert_resource(resources, resource) { var id; var strres = JSON.stringify(resource); resources.find(function (element, index) { if (JSON.stringify(element) === strres) { id = index; return true; } return false; }); if (id === undefined) { id = resources.length; resources.push(resource); } return id; } function insert_pstyle() { 
commands.push(enums.content.PARAGRAPH_BREAK); commands.push(paragraphs.length); chars.push(enums.content.NULL); chars.push(enums.content.NULL); var css = styles.top(); paragraphs.push(factory.paragraph(css)); } function flush_cstyle() { if (styles.dirty) { var css = styles.top(); commands.push(enums.content.FONT_CHANGE); chars.push(insert_resource(fonts, factory.font(css))); commands.push(enums.content.COLOR_CHANGE); chars.push(insert_resource(colors, factory.color(css['color']))); styles.dirty = false; } } function object_start(obj) { if (inside.object) return unsupported("object nesting"); var draw = obj.m_pDrawObj; inside.object = draw; objrefs.push(factory.objref(obj.m_eComponentType)); objs.push(draw); commands.push(enums.content.OBJECT_START); chars.push(enums.content.NULL); commands.push(objrefs.length-1); chars.push(enums.content.NULL); return draw; } function object_end() { if (!inside.object) throw new Error("inconsistent object start/end"); commands.push(enums.content.OBJECT_END); chars.push(enums.content.NULL); commands.push(enums.content.NULL); chars.push(enums.content.NULL); inside.object = null; } function vertical_align(oldalign, newalign) { if (oldalign !== newalign) { switch(oldalign) { case 'super': commands.push(enums.content.SUPERSCRIPT_END); chars.push(enums.content.NULL); break; case 'sub': commands.push(enums.content.SUBSCRIPT_END); chars.push(enums.content.NULL); break; default: break; } switch(newalign) { case 'super': commands.push(enums.content.SUPERSCRIPT_START); chars.push(enums.content.NULL); commands.push(enums.content.NULL); chars.push(enums.content.NULL); break; case 'sub': commands.push(enums.content.SUBSCRIPT_START); chars.push(enums.content.NULL); commands.push(enums.content.NULL); chars.push(enums.content.NULL); break; default: break; } } } /////////////////////////////////////////////////////////////////// function block_(start, attrs) { if (start) { styles.push(split_css(attrs.style, clone_css(styles.top()))); } else { 
styles.pop(); } } function p_(start, attrs) { if (start) { styles.push(split_css(attrs.style, clone_css(styles.top()))); insert_pstyle(); inside.paragraph = true; } else { styles.pop(); inside.paragraph = false; } } function story_(start, attrs) { if (start) { if (inside.object) return stl.handler_dispatcher(nsmap, story_builder(inside.object)); if (!inside.hyperlink) return unsupported("stl:story"); } } function scope_(start, attrs) { if (start) { if (!attrs.hyperlink) return unsupported("stl:scope"); if (inside.hyperlink) return unsupported("stl:scope nesting"); links.push(factory.link(attrs)); commands.push(enums.content.HYPERLINK_START); chars.push(enums.content.NULL); commands.push(links.length-1); chars.push(enums.content.NULL); inside.hyperlink = true; } else { inside.hyperlink = false; commands.push(enums.content.HYPERLINK_END); chars.push(enums.content.NULL); commands.push(enums.content.NULL); chars.push(enums.content.NULL); } } function span_(start, attrs) { if (Object.keys(attrs).length) { // treat empty span as a special case var oldcss; styles.dirty = true; if (start) { oldcss = styles.top(); styles.push(split_css(attrs.style, clone_css(styles.top()))); } else { oldcss = styles.pop(); } vertical_align(oldcss['vertical-align'], styles.top()['vertical-align']); } } function image_(start, attrs) { if (start) { object_start(factory.image(attrs)); return stl.empty_checker(); } else { object_end(); } } function table_(start, attrs) { if (start) { var css = split_css(attrs.style); var draw = object_start(factory.table(attrs, css)); return stl.handler_dispatcher(nsmap, table_builder(draw)); } else { object_end(); } } function text_(start, attrs) { if (start) { if (attrs.story) return unsupported("stl:story reference"); var css = split_css(attrs.style); object_start(factory.text(attrs, css)); } else { object_end(); } } function text(data) { if (data) { if (inside.paragraph) { flush_cstyle(); range(data.length).forEach(function(index) { 
chars.push(data.charCodeAt(index)); commands.push(enums.content.NULL); }); } else { if (data.trim()) unexpected("text outside paragraph"); } } } function finalize() { commands.push(enums.content.CONTENT_END); chars.push(enums.content.NULL); } return { p_: p_, span_: span_, block_: block_, scope_: scope_, story_: story_, image_: image_, table_: table_, text_: text_, field_: () => unsupported("stl:field"), chart_: () => unsupported("stl:chart"), fragment_: () => unsupported("stl:fragment"), script_: () => unsupported("stl:script"), text: text, finalize: finalize }; } function item_builder(objects) { var inside = {}; function object_start(obj) { if (inside.object) return unsupported("object nesting"); objects.push(obj); var draw = obj.m_pDrawObj; inside.object = draw; return draw; } function object_end() { if (!inside.object) throw new Error("inconsistent object start/end"); inside.object = null; } function story_(start, attrs) { if (start) { if (inside.object) return stl.handler_dispatcher(nsmap, story_builder(inside.object)); return unsupported("stl:story"); } } function text_(start, attrs) { if (start) { if (attrs.story) return unsupported("stl:story reference"); var css = split_css(attrs.style); object_start(factory.text(attrs, css)); } else { object_end(); } } function image_(start, attrs) { if (start) { object_start(factory.image(attrs)); return stl.empty_checker(); } else { object_end(); } } function table_(start, attrs) { if (start) { var css = split_css(attrs.style); var draw = object_start(factory.table(attrs, css)); return stl.handler_dispatcher(nsmap, table_builder(draw)); } else { object_end(); } } return { story_: story_, text_: text_, image_: image_, table_: table_, barcode_: () => unsupported("stl:barcode"), chart_: () => unsupported("stl:chart"), fragment_: () => unsupported("stl:fragment"), text: unexpected_text, finalize: () => {} }; } function doc_builder() { function story_(start, attrs) { if (start) { if (root.contents) return 
unsupported("multiple stl:story"); var contents = factory.content(root, 4, attrs); return stl.handler_dispatcher(nsmap, story_builder(contents.m_pTextDraw)); } } function page_(start, attrs) { if (start) { if (root.contents) return unsupported("multiple stl:page"); var contents = factory.canvas(root, 1, attrs); return stl.handler_dispatcher(nsmap, item_builder(contents.m_DrawFront)); } } return { story_: story_, page_: page_, text: unexpected_text, finalize: () => {} }; } function root_builder() { function document_(start, attrs) { if (start) return stl.handler_dispatcher(nsmap, doc_builder()); } return { stl_: () => {}, data_: () => unsupported("stl:data"), fixtures_: () => unsupported("stl:fixtures"), style_: () => unsupported("stl:style"), document_: document_, text: unexpected_text, finalize: () => {} }; } return root_builder(); } ///////////////////////////////////////////////////////////////////////////////////////////////////////// /* * emp2stl( src: stream [, dst: stream, options: object] ) : stream * * Parses _Empower JSON_ fragment and generates corresponding * _STL_ fragment * * Parameters: * - `src` ... input stream containing _Empower JSON_ * - `dst` ... output stream to be filled with resulting _STL_ (memory stream is created by default) * - `options` ... following options are currently supported: * - `indent` ... bool, string or a function(tag, tags, is_start) used for indentation * - `page` ... bool determining whether page type should be generated * - `fonts` ... optional callback for font remap * - `uris` ... optional callback for URI remap * - `@return` ... 
output stream (the `dst` argument if provided, temporary memory stream otherwise) */ exports.emp2stl = function emp2stl(src, dst, options) { dst = dst || streams.stream(); options = options || {}; if (!util.isStream(src) || !util.isStream(dst)) { throw new Error("Invalid argument, stream expected"); } var contents = JSON.parse(src.read()).contents; var writer = stl_writer(dst, options.indent); build_stl(contents, writer, options); writer.finalize(); return dst; }; /* * stl2emp( src: stream [, dst: stream, options: object] ) : stream * * Parses _STL_ document and generates corresponding _Empower JSON_ fragment * * Parameters: * - `src` ... input stream containing _STL_ * - `dst` ... output stream to be filled with resulting _Empower JSON_ (memory stream is created by default) * - `options` ... following options are currently supported: * - `indent` ... bool or a string used for indentation * - `permissive` ... determines whether the conversion fails or ignores unsupported constructs * - `fonts` ... optional callback for font remap * - `uris` ... optional callback for URI remap * - `@return` ... output stream (the `dst` argument if provided, temporary memory stream otherwise) */ exports.stl2emp = function emp2stl(src, dst, options) { options = options || {}; var nsmap = stl.namespace_stack(); var factory = json_factory(options); var root = factory.root(); var builder = json_builder(nsmap, factory, root, options); var parser = stl.parser(nsmap, builder); parser.write(src.read()).close(); dst.write(JSON.stringify(root, null, options.indent)); return dst; };
docplatform/forsetup/js/tools/empower.js
// Copyright (c) 2017 Open Text. All Rights Reserved. /*jslint bitwise:true */ 'use strict'; const util = require('util'); const streams = require('streams'); const stl = require('stl'); const range = require('range'); const enums = { item: { TABLE: 5, IMAGE: 6, TEXT: 14 }, content: { NULL: 0, HYPERLINK_START: -252, OBJECT_START: -251, PARAGRAPH_BREAK: -244, SUPERSCRIPT_START: -240, SUBSCRIPT_START: -239, HYPERLINK_END: -109, OBJECT_END: -106, CONTENT_END: -64, COLOR_CHANGE: -63, FONT_CHANGE: -62, SUBSCRIPT_END: -58, SUPERSCRIPT_END: -59 }, list: { NONE: 0, BULLETS: 1, NUMBERING: 2 }, numbering: { 0: '1.', // decimal 2: 'A.', // upper-alpha 3: 'a.', // lower-alpha 4: 'R.', // upper-roman 5: 'r.', // lower-roman 6: '1)', 7: 'A)', 8: 'a)', 9: 'R)', 10: 'r)', 15: '(1)', 16: '(A)', 17: '(a)', 18: '(R)', 19: '(r)', }, pen: { SOLID: 0, DASHED: 1, DOTTED: 3 }, valign: { TOP: 0, CENTER: 1, BOTTOM: 2 }, segmentpos: { TOP: 1, RIGHT: 2, BOTTOM: 4, LEFT: 8 }, defaults: { bullets: ['•', '◦', '▪'], numberings: ['1.', '1.', 'r.', '1)'] } }; function getKeyByValue(object, value) { return Object.keys(object).find(key => object[key] === value); } function simple_stack(item) { var items = []; if (item !== undefined) items.push(item); return { push: (item) => items.push(item), pop: () => items.pop(), top: () => items[items.length-1], length: () => items.length }; } ///////////////////////////////////////////////////////////////////////////////////////////////////// // // JSON -> STL // ///////////////////////////////////////////////////////////////////////////////////////////////////// function make_indenter(indent, default_indent) { if (util.isFunction(indent)) { return indent; } if (indent) { if (util.isBoolean(indent)) { indent = default_indent || ' '; } if (util.isNumber(indent)) { indent = ' '.repeat(indent); } if (util.isString(indent)) { return () => indent; } throw new Error("Unsupported indent: " + indent); } return () => ''; } function xml_writer(stream, indenter) { var tags 
= []; var no_children; var content = ''; function format_start(tag, attrs) { attrs = attrs || {}; var result = '<' + tag; var keys = Object.keys(attrs); if (keys.length) { result += ' ' + keys.map(function(key) { return key + '="' + attrs[key] + '"'; }).join(' '); } return result + '>'; } function format_end(tag) { return '</' + tag + '>'; } function flush() { content += cache; cache = ''; } function start(tag, attrs) { var line = format_start(tag, attrs); var indent = indenter(tag, tags, true); if (indent) { line = '\n' + indent.repeat(tags.length) + line; } content += line; no_children = true; tags.push(tag); } function end(tag) { var top = tags.pop(); if (top !== tag) { throw new Error("Tag mismatch (trying to close '" + tag + "' while top element is '" + top + "')"); } if (no_children) { content = content.slice(0, -1) + '/>'; no_children = false; } else { var line = format_end(tag); var indent = indenter(tag, tags, false); if (indent) { line = '\n' + indent.repeat(tags.length) + line; } content += line; } } function text(data) { if (!tags.length) { throw new Error("Cannot write text '" + data + "' outside elements"); } no_children = false; content += data; } function finalize() { stream.write(content); } return { start: start, end: end, text: text, finalize: finalize }; } function stl_writer(stream, indent) { var writer = xml_writer(stream, make_indenter(indent)); function start(tag, attrs) { if (attrs && attrs.style === '') { delete attrs.style; } writer.start('stl:' + tag, attrs); } function end(tag) { writer.end('stl:' + tag); } function text(data) { writer.text(data); } function finalize() { end('stl'); writer.finalize(); writer = null; } var attrs = { 'xmlns:stl': stl.namespaces.stl, version: stl.version }; start('stl', attrs); return { start: start, end: end, text: text, finalize: finalize }; } function css_format(css) { return Object.keys(css).filter(function (key) { return css[key] !== null; }).map(function (key) { return key + ': ' + css[key]; 
}).join('; '); } function css_converter(resolution, options) { function convert_color(col, map_black_as_null) { function hex(d) { return ('0' + (d.toString(16))).slice(-2).toUpperCase(); } if (col.m_eColorModel !== 0) { throw new Error("Unsupported color model: " + col.m_eColorModel); } var r = col.m_lColor & 0xff; var g = (col.m_lColor >> 8) & 0xff; var b = (col.m_lColor >> 16) & 0xff; return (map_black_as_null && !r && !b && !g) ? null : '#' + hex(r) + hex(g) + hex(b); } function convert_length(v) { return v / resolution + 'in'; } function convert_pos(rect, attrs) { attrs = attrs || {}; if (rect.left) { attrs.x = convert_length(rect.left); } if (rect.top) { attrs.y = convert_length(rect.top); } return attrs; } function convert_dim(rect, attrs) { attrs = attrs || {}; if (rect.right) { attrs.w = convert_length(rect.right - rect.left); } if (rect.bottom) { attrs.h = convert_length(rect.bottom - rect.top); } return attrs; } function convert_bbox(rect, attrs) { attrs = attrs || {}; convert_pos(rect, attrs); convert_dim(rect, attrs); return attrs; } function convert_rowbox(row, attrs) { attrs = attrs || {}; attrs.h = convert_length(row.m_iHeight); if (!row.m_bFixedSize) { var css = { '-stl-shape-resize': 'free 0pt max 0pt max' }; attrs.style = css_format(css); } return attrs; } function css_reset() { return { 'color': null, 'font-family': null, 'font-size': null, 'font-weight': null, 'font-style': null, 'text-decoration': null, 'text-align': null, 'margin-left': null, 'margin-right': null, 'margin-top': null, 'margin-bottom': null, 'padding-left': null, 'padding-right': null, 'padding-top': null, 'padding-bottom': null, 'border': null, 'border-top': null, 'border-right': null, 'border-bottom': null, 'border-left': null, 'fill': null, '-stl-list-counter': null, '-stl-list-mask': null, '-stl-list-level': null, '-stl-tabs': null, '-stl-shape-resize': null, '-stl-alignment': null //'vertical-align': null // handled specially (@todo fixit) }; } function convert_parstyle(ps, 
css) { css = css || css_reset(); var alignments = ['left', 'right', 'center', 'justify']; if (ps.iJustification) { css['text-align'] = alignments[ps.iJustification]; } if (ps.iLeftIndent) { css['margin-left'] = convert_length(ps.iLeftIndent); } if (ps.iRightIndent) { css['margin-right'] = convert_length(ps.iRightIndent); } if (ps.iSpaceBefore) { css['margin-top'] = convert_length(ps.iSpaceBefore); } if (ps.iSpaceAfter) { css['margin-bottom'] = convert_length(ps.iSpaceAfter); } var level; var format; switch (ps.iNumbering) { case enums.list.NONE: break; case enums.list.BULLETS: level = ps.iNumberIndent - 1; format = enums.defaults.bullets[level % 3]; css['-stl-list-mask'] = format + '\\9'; css['-stl-list-level'] = level; css['-stl-tabs'] = convert_length(250 * (level + 1)); css['margin-left'] = convert_length(250 * level); break; case enums.list.NUMBERING: level = ps.iNumberIndent - 1; format = enums.numbering[ps.eNumberType]; if (format === undefined) { throw new Error("Unknown eNumberType: " + ps.eNumberType); } css['-stl-list-counter'] = 'default_counter'; css['-stl-list-mask'] = '%' + level + '!' + format + '\\9'; css['-stl-list-level'] = level; css['-stl-tabs'] = convert_length(250 * (level + 1)); css['margin-left'] = convert_length(250 * level); break; default: throw new Error('Unsupported numbering mode: ' + ps.iNumbering); } return css; } function convert_font(name) { return options.fonts ? 
options.fonts(name) : name; } function convert_charstyle(cs, css) { css = css || css_reset(); css['font-family'] = convert_font(cs.strName); css['font-size'] = cs.iFontHeight10X / 10 + 'pt'; if (cs.bBold) { css['font-weight'] = 'bold'; } if (cs.bItalic) { css['font-style'] = 'italic'; } if (cs.bUnderline) { css['text-decoration'] = 'underline'; } if (cs.bStrikeThru) { css['text-decoration'] = 'line-through'; } return css; } function convert_pen(thickness, style, color) { function pen_style(src) { var key = getKeyByValue(enums.pen, src); if (!key) { throw new Error('Unsupported pen style: ' + src); } return key.toLowerCase(); } thickness = thickness ? convert_length(thickness) : '1px'; return thickness + ' ' + pen_style(style) + ' ' + convert_color(color); } function convert_padding(draw, css) { css = css || css_reset(); if (draw.m_iLeftMargin) { css['padding-left'] = convert_length(draw.m_iLeftMargin); } if (draw.m_iRightMargin) { css['padding-right'] = convert_length(draw.m_iRightMargin); } if (draw.m_iTopMargin) { css['padding-top'] = convert_length(draw.m_iTopMargin); } if (draw.m_iBottomMargin) { css['padding-bottom'] = convert_length(draw.m_iBottomMargin); } return css; } function convert_item_style(draw, css) { css = css || css_reset(); if (draw.m_bPen === true) { css.border = convert_pen(draw.m_iPenWidth, draw.m_iPenStyle, draw.m_clrPen); } if (draw.m_bBackGroundTransparent === false) { css.fill = convert_color(draw.m_clrBackGround); } convert_padding(draw, css); if (draw.m_bAutoSizeX || draw.m_bAutoSizeY) { var x = draw.m_bAutoSizeX ? 'max' : '0pt'; var y = draw.m_bAutoSizeY ? 
'max' : '0pt'; css['-stl-shape-resize'] = ['free', x, y, x, y].join(' '); } switch (draw.m_eVertJust) { case undefined: case enums.valign.TOP: css['-stl-alignment'] = null; break; case enums.valign.CENTER: css['-stl-alignment'] = 'vertical 0.5'; break; case enums.valign.BOTTOM: css['-stl-alignment'] = 'vertical 1'; break; default: throw new Error("Unsupported vertical justification: " + draw.m_eVertJust); } return css; } function convert_cell_border(cell, row, column, css) { css = css || css_reset(); if (row.m_iLineAbove !== -1) { css['border-top'] = convert_pen(row.m_iWeightAbove, row.m_iLineAbove, row.m_clrAbove); } if (row.m_iLineBelow !== -1) { css['border-bottom'] = convert_pen(row.m_iWeightBelow, row.m_iLineBelow, row.m_clrBelow); } if (column.m_iLineLeft !== -1) { css['border-left'] = convert_pen(column.m_iWeightLeft, column.m_iLineLeft, column.m_clrLeft); } if (column.m_iLineRight !== -1) { css['border-right'] = convert_pen(column.m_iWeightRight, column.m_iLineRight, column.m_clrRight); } cell.m_FrameSegShape.m_ppSegments.forEach(function (segment) { if (segment.m_estType === 1 && segment.m_bVisible) { var pen = convert_pen(segment.m_iLineWeight, segment.m_iLineStyle, segment.m_clrLine); switch (segment.m_elpPosition) { case enums.segmentpos.TOP: css['border-top'] = pen; break; case enums.segmentpos.RIGHT: css['border-right'] = pen; break; case enums.segmentpos.BOTTOM: css['border-bottom'] = pen; break; case enums.segmentpos.LEFT: css['border-left'] = pen; break; default: // is it a mask? 
throw new Error("Unsupported segment position: " + segment.m_elpPosition); } } }); return css; } return { length: convert_length, pos: convert_pos, dim: convert_dim, bbox: convert_bbox, rowbox: convert_rowbox, color: convert_color, parstyle: convert_parstyle, charstyle: convert_charstyle, item_style: convert_item_style, cell_border: convert_cell_border }; } function content_inserter(writer) { const states = { CLOSED: 0, CACHED: 1, OPEN: 2 }; var style = { state: states.CLOSED, css: {} }; var blackspace = null; var paragraph = null; function padding() { // generate empty span to avoid whitespace trim writer.start('span'); writer.end('span'); } function flush() { if (blackspace === false && paragraph) { padding(); } if (style.state === states.OPEN) { writer.end('span'); } style.state = states.CACHED; } function style_change(css) { var modified = false; Object.keys(css).forEach(function (key) { var value = css[key]; if (style.css[key] !== value) { style.css[key] = value; modified = true; } }); if (modified) { flush(); } } function push(tag, attrs) { flush(); blackspace = null; writer.start(tag, attrs); } function pop(tag) { flush(); writer.end(tag); } function paragraph_start(css) { if (paragraph === true) { throw new Error("Paragraph nesting not supported"); } push('p', {style: css_format(css)}); paragraph = true; } function paragraph_end() { if (paragraph === null) { return; } if (paragraph === false) { throw new Error("Paragraph already closed"); } pop('p'); paragraph = false; } function character(ch) { if (style.state === states.CACHED) { writer.start('span', {style: css_format(style.css)}); style.state = states.OPEN; blackspace = null; } if (/\s/.test(ch)) { if (!blackspace) { padding(); } blackspace = false; } else { blackspace = true; } writer.text(ch); } return { style_change: style_change, paragraph_start: paragraph_start, paragraph_end: paragraph_end, character: character, push: push, pop: pop }; } function build_stl(contents, writer, options) { var 
converter = css_converter(contents.m_lResolution, options); var convert_object; function convert_content(draw, inserter) { inserter = inserter || content_inserter(writer); draw.m_cChars.forEach(function (code, index) { var cmd = draw.m_sXPos[index]; switch (cmd) { case enums.content.HYPERLINK_START: inserter.push('scope', {'hyperlink': draw.m_Links[code].msLink}); inserter.push('story'); break; case enums.content.OBJECT_START: convert_object(draw.m_Objs[code].m_iObjType, draw.m_pObjs[code], inserter); break; case enums.content.PARAGRAPH_BREAK: inserter.paragraph_end(); inserter.paragraph_start(converter.parstyle(draw.m_ParaValues[draw.m_sXPos[index + 1]])); break; case enums.content.SUPERSCRIPT_START: inserter.style_change({'vertical-align': 'super'}); break; case enums.content.SUBSCRIPT_START: inserter.style_change({'vertical-align': 'sub'}); break; case enums.content.HYPERLINK_END: inserter.pop('story'); inserter.pop('scope'); break; case enums.content.OBJECT_END: break; case enums.content.CONTENT_END: inserter.paragraph_end(); break; case enums.content.COLOR_CHANGE: inserter.style_change({'color': converter.color(draw.m_Colors[code], true)}); break; case enums.content.FONT_CHANGE: inserter.style_change(converter.charstyle(draw.m_TextFonts[code])); break; case enums.content.SUBSCRIPT_END: case enums.content.SUPERSCRIPT_END: inserter.style_change({'vertical-align': null}); break; default: if (cmd >= 0 && code > 0) { inserter.character(String.fromCharCode(code)); } break; } }); } function convert_table(draw, inserter) { function convert_row(row, r) { inserter.push('row', converter.rowbox(row)); draw.m_Columns.forEach(function (column, c) { var cell = draw.m_Cells.find(function (cell) { return cell.m_iColumn === c && cell.m_iRow === r; }); var attrs = {}; if (r === 0) { attrs.w = converter.length(column.m_iWidth); } var css = converter.item_style(cell.m_pTextDraw); converter.cell_border(cell, row, column, css); attrs.style = css_format(css); inserter.push('cell', 
attrs); convert_content(cell.m_pTextDraw); inserter.pop('cell'); }); inserter.pop('row'); } // we do not convert table width & height, we convert row/column dimensions instead var attrs = converter.pos(draw.m_rectPosition); var css = converter.item_style(draw); attrs.style = css_format(css); inserter.push('table', attrs); inserter.push('story'); draw.m_Rows.forEach(convert_row); inserter.pop('story'); inserter.pop('table'); } function convert_image(draw, inserter) { var attrs = converter.bbox(draw.m_rectPosition); var uri = 'cas:' + draw.m_pDbBitmap.m_strCASId; attrs.src = options.uris ? options.uris(uri) : uri; inserter.push('image', attrs); inserter.pop('image'); } function convert_text(draw, inserter) { var attrs = converter.bbox(draw.m_rectPosition); var css = converter.item_style(draw); attrs.style = css_format(css); inserter.push('text', attrs); inserter.push('story'); convert_content(draw); inserter.pop('story'); inserter.pop('text'); } convert_object = function (type, draw, inserter) { inserter = inserter || content_inserter(writer); switch (type) { case enums.item.TABLE: convert_table(draw, inserter); break; case enums.item.IMAGE: convert_image(draw, inserter); break; case enums.item.TEXT: convert_text(draw, inserter); break; default: throw new Error("Unsupported object type: " + type); } }; function convert_text_message(contents) { var draw = contents.m_pTextDraw; var attrs = converter.bbox(draw.m_rectPosition); writer.start('story', {name: 'Main', w: attrs.w}); convert_content(draw); writer.end('story'); if (options.page) { writer.start('page', attrs); var css = converter.item_style(draw); attrs.style = css_format(css); attrs.story = 'Main'; writer.start('text', attrs); writer.end('text'); writer.end('page'); } } function convert_canvas_message(contents) { var attrs = { w: converter.length(contents.m_lWidth), h: converter.length(contents.m_lHeight) }; writer.start('page', attrs); contents.m_DrawFront.forEach(function (obj) { 
convert_object(obj.m_eComponentType, obj.m_pDrawObj); }); writer.end('page'); } writer.start('document'); if (contents.m_bTextOnly) { convert_text_message(contents); } else { convert_canvas_message(contents); } writer.end('document'); } ///////////////////////////////////////////////////////////////////////////////////////////////////// // // STL -> JSON // ///////////////////////////////////////////////////////////////////////////////////////////////////// function css2rgb(input) { var m = input.match(/^#([0-9a-f]{3})$/i); if(m) { // in three-character format, each value is multiplied by 0x11 to give an // even scale from 0x00 to 0xff return [ parseInt(m[1].charAt(0),16)*0x11, parseInt(m[1].charAt(1),16)*0x11, parseInt(m[1].charAt(2),16)*0x11 ]; } m = input.match(/^#([0-9a-f]{6})$/i); if(m) { return [ parseInt(m[1].substr(0,2),16), parseInt(m[1].substr(2,2),16), parseInt(m[1].substr(4,2),16) ]; } m = input.match(/^rgb\s*\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$/i); if(m) { return [m[1],m[2],m[3]]; } // @todo: color names, rgba, hsl, ... 
throw new Error("Unsupported color format: " + input); } function json_factory(options) { var id = 0; var factory = initialize(); var resolution = options.resolution || 1000; var multipliers = { 'pt': 1, 'px': 72/96, 'in': 72, 'pc': 72/6, 'mm': 72/25.4, 'cm': 72/2.54 }; function convert_length(len, scale) { scale = scale || resolution/72; var matches = /([0-9\.]+)(pt|px|in|pc|mm|cm|em|%)/.exec(len); if (!matches) throw new Error("Invalid length: " + len); var multiplier = multipliers[matches[2]]; if (!multiplier) throw new Error("Unsupported unit: " + matches[2]); return Math.round(parseFloat(matches[1]) * multiplier * scale); } function convert_bbox(attrs) { var x = convert_length(attrs.x || '0in'); var y = convert_length(attrs.y || '0in'); var w = convert_length(attrs.w || '1in'); var h = convert_length(attrs.h || '1in'); return { left: x, right: x+w, top: y, bottom: y+h }; } function is_autosize(css) { var result = [false, false]; if (css['-stl-shape-resize']) { var mode = css['-stl-shape-resize'].split(' '); if (mode[0] === 'free') { result[0] = (mode.length === 1) || (mode[1] === 'max'); result[1] = (mode.length === 1) || (mode[2] === 'max'); } } return result; } function initialize() { if (!json_factory.cache) { var factory = require('empower.json').factory; var instance = {}; Object.keys(factory).forEach(function(key) { var src = null; instance[key] = function clone() { if (src == null) { src = JSON.stringify(factory[key]); } return JSON.parse(src); }; }); json_factory.cache = instance; } return json_factory.cache; } function color(css_color) { var rgb = css2rgb(css_color || '#000'); var c = factory.color(); c.m_lColor = rgb[0] | (rgb[1] << 8) | (rgb[2] << 16); return c; } function font(css) { css = css || {}; var f = factory.font(); var family = css['font-family'] || "Lato"; if (options.fonts) family = options.fonts(family); f.clrFontColor = color(); f.strName = family; f.iFontHeight10X = convert_length(css['font-size'] || '10pt', 10); f.bBold = 
css['font-weight'] === 'bold'; f.bItalic = css['font-style'] === 'italic'; f.bUnderline = css['text-decoration'] === 'underline'; f.bStrikeThru = css['text-decoration'] === 'line-through'; return f; } function paragraph(css) { function convert_prop(property, par, key) { if (property) { par[key] = convert_length(property); } } function get_numbering_type(level, mask) { if (mask) { var match = /%\d!(.+)\\9/.exec(mask); var mask = match ? match[1] : enums.defaults.numberings[level]; var type = getKeyByValue(enums.numbering, mask); if (type) { return +type; } } throw new Error("XXX Unsupported numbering mask: " + mask); } var p = factory.paragraph(); p.iDefaultTab = resolution/4; p.iBulletFont = -1; var alignments = ['left', 'right', 'center', 'justify']; var align = alignments.indexOf(css['text-align']); if (align !== -1) p.iJustification = align; convert_prop(css['margin-left'], p, 'iLeftIndent'); convert_prop(css['margin-right'], p, 'iRightIndent'); convert_prop(css['margin-top'], p, 'iSpaceBefore'); convert_prop(css['margin-bottom'], p, 'iSpaceAfter'); if (css['-stl-list-level']) { var level = parseInt(css['-stl-list-level']); p.iNumberIndent = level + 1; p.iLeftIndent += p.iDefaultTab; p.bUserSetType = false; p.iNumberColor = 0; p.eUserSetNumber = 0; if (css['-stl-list-counter']) { p.iNumbering = enums.list.NUMBERING; p.bUserSetColor = false; p.eNumberType = get_numbering_type(level, css['-stl-list-mask']); } else { p.iNumbering = enums.list.BULLETS; p.iBulletFont = 2; p.pszNumberString = 168; } } else { p.iNumbering = enums.list.NONE; } Object.keys(p).forEach(function (key) { if (p[key] === null) { delete p[key]; } }); return p; } function link(attrs) { var l = factory.link(); l.msLink = attrs.hyperlink; return l; } function objref(type) { var r = factory.objref(); r.m_iObjType = type; return r; } function textprops() { return factory.textprops(); } function tableprops() { var p = factory.tableprops(); p.m_eEditChangeType = 1; p.m_bCanChangeFormat = true; 
p.m_bCanType = true; p.m_eTextField = 3; p.m_ePromptType = 1; p.m_FormFieldInfoLocal.m_clrLine = color(); p.m_FormFieldInfoLocal.m_clrFill = color('#c4c4c4'); p.m_iTabOrderID = 3; return p; } function columnprops() { var p = factory.tableprops(); p.m_eEditChangeType = 0; p.m_bCanChangeFormat = false; p.m_bCanType = false; p.m_eTextField = 0; p.m_ePromptType = 0; p.m_FormFieldInfoLocal.m_clrLine = color(); p.m_FormFieldInfoLocal.m_clrFill = color('#c4c4c4'); p.m_iTabOrderID = 0; return p; } function rowprops() { return columnprops(); } function image(attrs) { id += 2; var img = factory.image(); var uri = options.uris ? options.uris(attrs.src) : attrs.src; var casid = uri.replace(/^(cas:)/,''); var draw = img.m_pDrawObj; draw.m_oiID = id-1; draw.m_UNITSPERINCH = resolution; draw.m_pDbBitmap.m_oiDB = id-2; draw.m_pDbBitmap.m_strCASId = casid; draw.m_rectPosition = convert_bbox(attrs); return img; } function convert_pen(border) { var parts = border.split(' '); var style = enums.pen[parts[1].toUpperCase()]; if (style === undefined) { throw new Error("Unsupported pen style: " + parts[1]); } var result = { style: style, color: color(parts[2]) }; if (parts[0] !== '1px') { // handle thickness device dependent specially (@todo fixme) result.thickness = convert_length(parts[0]); } return result; } function apply_item_style(draw, css) { function convert_alignment(alignment) { if (!alignment) { return enums.valign.TOP; } var parts = alignment.split(' '); if (parts[0] === 'vertical') { switch (+(parts[1])) { case 0: return enums.valign.TOP; case 0.5: return enums.valign.CENTER; case 1: return enums.valign.BOTTOM; } } throw new Error("Unsupported alignment: ", alignment); } if (css.border) { var pen = convert_pen(css.border); draw.m_iPenWidth = pen.thickness; draw.m_iPenStyle = pen.style; draw.m_clrPen = pen.color; draw.m_bPen = true; } if (css.fill) { draw.m_clrBackGround = color(css.fill); draw.m_bBackGroundTransparent = false; } if (css['padding-left']) { draw.m_iLeftMargin = 
convert_length(css['padding-left']); } if (css['padding-right']) { draw.m_iRightMargin = convert_length(css['padding-right']); } if (css['padding-top']) { draw.m_iTopMargin = convert_length(css['padding-top']); } if (css['padding-bottom']) { draw.m_iBottomMargin = convert_length(css['padding-bottom']); } var as = is_autosize(css); draw.m_bAutoSizeX = as[0]; draw.m_bAutoSizeY = as[1]; draw.m_eVertJust = convert_alignment(css['-stl-alignment']); } function apply_cell_borders(shape, css) { function convert_edge(border, pos) { var edge = factory.cell_edge(); edge.m_elpPosition = pos; if (border) { var pen = convert_pen(border); edge.m_iLineWeight = pen.thickness; edge.m_iLineStyle = pen.style; edge.m_clrLine = pen.color; edge.m_bVisible = true; } else { edge.m_iLineWeight = 0; edge.m_iLineStyle = enums.pen.SOLID; edge.m_clrLine = color(); edge.m_bVisible = false; } return edge; } function convert_corner(pos) { var corner = factory.cell_corner(); corner.m_iLineStyle = enums.pen.SOLID; corner.m_iLineWeight = 0; corner.m_clrLine = color(); corner.m_ecpCorner = 2; return corner; } var segments = shape.m_ppSegments; segments.push(convert_corner(enums.segmentpos.TOP)); segments.push(convert_edge(css['border-top'], enums.segmentpos.TOP)); segments.push(convert_corner(enums.segmentpos.RIGHT)); segments.push(convert_edge(css['border-right'], enums.segmentpos.RIGHT)); segments.push(convert_corner(enums.segmentpos.BOTTOM)); segments.push(convert_edge(css['border-bottom'], enums.segmentpos.BOTTOM)); segments.push(convert_corner(enums.segmentpos.LEFT)); segments.push(convert_edge(css['border-left'], enums.segmentpos.LEFT)); } function text(attrs, css) { id += 1; var txt = factory.text(); var draw = txt.m_pDrawObj; draw.m_oiID = id-1; draw.m_rectPosition = convert_bbox(attrs); draw.m_pEditableProps = textprops(); draw.m_UNITSPERINCH = resolution, draw.m_iLogicalRes = resolution, draw.m_iDesignRes = resolution, draw.m_clrPen = color(); draw.m_iMaxWidthDes = (draw.m_rectPosition.right 
- draw.m_rectPosition.left); draw.m_Colors.push(color()); draw.m_Colors.push(color('#00ffc0')); draw.m_Colors.push(color('#f00')); apply_item_style(draw, css); return txt; } function column(attrs) { var col = factory.column(); col.m_iWidth = convert_length(attrs.w); col.m_clrLeft = color(); col.m_clrRight = color(); col.m_pEditableProps = columnprops(); return col; } function row(attrs, css) { var row = factory.row(); row.m_iHeight = convert_length(attrs.h); row.m_clrAbove = color(); row.m_clrBelow = color(); row.m_colorLegend = color(); row.m_pEditableProps = rowprops(); var as = is_autosize(css); row.m_bFixedSize = !(as[0] || as[1]); return row; } function cell(css, c, r, width, height) { id += 1; var draw = factory.text().m_pDrawObj; draw.m_oiID = id-1; draw.m_bAutoSizeX = false; draw.m_bAutoSizeY = false; draw.m_rectPosition.left = 0; draw.m_rectPosition.top = 0; draw.m_rectPosition.right = width; draw.m_rectPosition.bottom = height; draw.m_pEditableProps = textprops(); draw.m_UNITSPERINCH = resolution, draw.m_iLogicalRes = resolution, draw.m_iDesignRes = resolution, draw.m_clrPen = color(); draw.m_iMaxWidthDes = width; draw.m_Colors.push(color()); draw.m_Colors.push(color('#00ffc0')); draw.m_Colors.push(color('#f00')); apply_item_style(draw, css); var cell = factory.cell(); cell.m_pTextDraw = draw; cell.m_iColumn = c; cell.m_iRow = r; apply_cell_borders(cell.m_FrameSegShape, css); return cell; } function table(attrs, css) { id += 1; var tbl = factory.table(); var draw = tbl.m_pDrawObj; draw.m_oiID = id; draw.m_rectPosition = convert_bbox(attrs); draw.m_UNITSPERINCH = resolution; draw.m_clrPen = color(); draw.m_clrBrushFill = color('#00c0c0'); draw.m_clrShadow = color('#00c0c0'); draw.m_pEditableProps = tableprops(); draw.m_colorLegendFrame = color(); apply_item_style(draw, css); return tbl; } function canvas(root, template_id, attrs) { const width = convert_length(attrs.w); const height = convert_length(attrs.h); root.m_ePageType = 1; root.m_Size.width = 
width; root.m_Size.height = height; root.m_scopedMessageTemplate = template_id; delete root.m_oi; delete root.m_scopedMessageType; delete root.m_bCanSplitText; delete root.m_iWidowOrphan; delete root.m_bRenumberText; delete root.m_lBottomFlowMargin; delete root.m_lTopFlowMargin ; root.contents = factory.canvas(); root.contents.m_lResolution = resolution; root.contents.m_lWidth = width; root.contents.m_lHeight = height; root.contents.m_lGrowMaxY = height; root.contents.m_lTopMargin = 0; root.contents.m_lBottomMargin = 0; return root.contents; } function content(root, template_id, attrs) { const css = { '-stl-shape-resize': 'free 0pt max 0pt max', }; root.m_oi = 0; root.m_ePageType = 0; root.m_scopedMessageType = template_id; delete root.m_Size; delete root.m_scopedMessageTemplate; root.contents = factory.content(); root.contents.m_lResolution = resolution; root.contents.m_pTextDraw = text(attrs, css).m_pDrawObj; root.rule = null; return root.contents; } function root() { var r = factory.root(); r.m_iDesignResolution = resolution; return r; } return { color: color, font: font, paragraph: paragraph, link: link, objref: objref, image: image, text: text, table: table, row: row, column: column, cell: cell, content: content, canvas: canvas, root: root }; } function json_builder(nsmap, factory, root, options) { const unsupported = function (item) { var message = "Unsupported " + item; if (options.permissive) { console.error(message + " (ignored)"); return ignorant(); } throw new Error(message); }; const unexpected = function(tag, what) { var message = "Unexpected " + text + " inside " + tag; if (options.permissive) { console.error(message + " (ignored)"); } throw new Error(message); } function unexpected_text(data) { if (data.trim()) unexpected("stl:stl", "text"); } function clone_css(css) { return JSON.parse(JSON.stringify(css)); } function split_css(style, css) { css = css || {}; if (style) { style.trim().split(';').forEach(function(property) { var parts = 
property.trim().split(':'); if (parts.length === 2) { css[parts[0].trim()] = parts[1].trim(); } else if (parts[0].length) { throw new Error("Invalid CSS property: "+parts[0]); } }); } return css; } function table_builder(draw) { var columns = []; var rows = []; var cells = []; var column = 0; function row_(start, attrs) { if (start) { column = 0; var css = split_css(attrs.style); rows.push(factory.row(attrs, css)); } } function cell_(start, attrs) { if (start) { if (rows.length === 1) { columns.push(factory.column(attrs)); } var row = rows.length - 1; var css = split_css(attrs.style); var cell = factory.cell(css, column, row, columns[column].m_iWidth, rows[row].m_iHeight); cells.push(cell); return stl.handler_dispatcher(nsmap, story_builder(cell.m_pTextDraw)); } else { column += 1; } } function finalize() { draw.m_Rows = rows; draw.m_Columns = columns; draw.m_Cells = cells; // we override table w,h with a sum of column widths and row heights var width = columns.reduce((acc,el) => acc+el.m_iWidth, 0);; var height = rows.reduce((acc,el) => acc+el.m_iHeight, 0); draw.m_rectPosition.right = width; draw.m_rectPosition.bottom = height; } return { story_: () => {}, row_: row_, cell_: cell_, repeater_: () => unsupported("stl:repeater"), text: unexpected_text, finalize: finalize, }; } function story_builder(draw) { var paragraphs = draw.m_ParaValues; var colors = draw.m_Colors; var fonts = draw.m_TextFonts; var chars = draw.m_cChars; var commands = draw.m_sXPos; var objrefs = draw.m_Objs; var objs = draw.m_pObjs; var links = draw.m_Links; var styles = simple_stack({}); styles.dirty = true; var inside = {}; function insert_resource(resources, resource) { var id; var strres = JSON.stringify(resource); resources.find(function (element, index) { if (JSON.stringify(element) === strres) { id = index; return true; } return false; }); if (id === undefined) { id = resources.length; resources.push(resource); } return id; } function insert_pstyle() { 
commands.push(enums.content.PARAGRAPH_BREAK); commands.push(paragraphs.length); chars.push(enums.content.NULL); chars.push(enums.content.NULL); var css = styles.top(); paragraphs.push(factory.paragraph(css)); } function flush_cstyle() { if (styles.dirty) { var css = styles.top(); commands.push(enums.content.FONT_CHANGE); chars.push(insert_resource(fonts, factory.font(css))); commands.push(enums.content.COLOR_CHANGE); chars.push(insert_resource(colors, factory.color(css['color']))); styles.dirty = false; } } function object_start(obj) { if (inside.object) return unsupported("object nesting"); var draw = obj.m_pDrawObj; inside.object = draw; objrefs.push(factory.objref(obj.m_eComponentType)); objs.push(draw); commands.push(enums.content.OBJECT_START); chars.push(enums.content.NULL); commands.push(objrefs.length-1); chars.push(enums.content.NULL); return draw; } function object_end() { if (!inside.object) throw new Error("inconsistent object start/end"); commands.push(enums.content.OBJECT_END); chars.push(enums.content.NULL); commands.push(enums.content.NULL); chars.push(enums.content.NULL); inside.object = null; } function vertical_align(oldalign, newalign) { if (oldalign !== newalign) { switch(oldalign) { case 'super': commands.push(enums.content.SUPERSCRIPT_END); chars.push(enums.content.NULL); break; case 'sub': commands.push(enums.content.SUBSCRIPT_END); chars.push(enums.content.NULL); break; default: break; } switch(newalign) { case 'super': commands.push(enums.content.SUPERSCRIPT_START); chars.push(enums.content.NULL); commands.push(enums.content.NULL); chars.push(enums.content.NULL); break; case 'sub': commands.push(enums.content.SUBSCRIPT_START); chars.push(enums.content.NULL); commands.push(enums.content.NULL); chars.push(enums.content.NULL); break; default: break; } } } /////////////////////////////////////////////////////////////////// function block_(start, attrs) { if (start) { styles.push(split_css(attrs.style, clone_css(styles.top()))); } else { 
styles.pop(); } } function p_(start, attrs) { if (start) { styles.push(split_css(attrs.style, clone_css(styles.top()))); insert_pstyle(); inside.paragraph = true; } else { styles.pop(); inside.paragraph = false; } } function story_(start, attrs) { if (start) { if (inside.object) return stl.handler_dispatcher(nsmap, story_builder(inside.object)); if (!inside.hyperlink) return unsupported("stl:story"); } } function scope_(start, attrs) { if (start) { if (!attrs.hyperlink) return unsupported("stl:scope"); if (inside.hyperlink) return unsupported("stl:scope nesting"); links.push(factory.link(attrs)); commands.push(enums.content.HYPERLINK_START); chars.push(enums.content.NULL); commands.push(links.length-1); chars.push(enums.content.NULL); inside.hyperlink = true; } else { inside.hyperlink = false; commands.push(enums.content.HYPERLINK_END); chars.push(enums.content.NULL); commands.push(enums.content.NULL); chars.push(enums.content.NULL); } } function span_(start, attrs) { if (Object.keys(attrs).length) { // treat empty span as a special case var oldcss; styles.dirty = true; if (start) { oldcss = styles.top(); styles.push(split_css(attrs.style, clone_css(styles.top()))); } else { oldcss = styles.pop(); } vertical_align(oldcss['vertical-align'], styles.top()['vertical-align']); } } function image_(start, attrs) { if (start) { object_start(factory.image(attrs)); return stl.empty_checker(); } else { object_end(); } } function table_(start, attrs) { if (start) { var css = split_css(attrs.style); var draw = object_start(factory.table(attrs, css)); return stl.handler_dispatcher(nsmap, table_builder(draw)); } else { object_end(); } } function text_(start, attrs) { if (start) { if (attrs.story) return unsupported("stl:story reference"); var css = split_css(attrs.style); object_start(factory.text(attrs, css)); } else { object_end(); } } function text(data) { if (data) { if (inside.paragraph) { flush_cstyle(); range(data.length).forEach(function(index) { 
chars.push(data.charCodeAt(index)); commands.push(enums.content.NULL); }); } else { if (data.trim()) unexpected("text outside paragraph"); } } } function finalize() { commands.push(enums.content.CONTENT_END); chars.push(enums.content.NULL); } return { p_: p_, span_: span_, block_: block_, scope_: scope_, story_: story_, image_: image_, table_: table_, text_: text_, field_: () => unsupported("stl:field"), chart_: () => unsupported("stl:chart"), fragment_: () => unsupported("stl:fragment"), script_: () => unsupported("stl:script"), text: text, finalize: finalize }; } function item_builder(objects) { var inside = {}; function object_start(obj) { if (inside.object) return unsupported("object nesting"); objects.push(obj); var draw = obj.m_pDrawObj; inside.object = draw; return draw; } function object_end() { if (!inside.object) throw new Error("inconsistent object start/end"); inside.object = null; } function story_(start, attrs) { if (start) { if (inside.object) return stl.handler_dispatcher(nsmap, story_builder(inside.object)); return unsupported("stl:story"); } } function text_(start, attrs) { if (start) { if (attrs.story) return unsupported("stl:story reference"); var css = split_css(attrs.style); object_start(factory.text(attrs, css)); } else { object_end(); } } function image_(start, attrs) { if (start) { object_start(factory.image(attrs)); return stl.empty_checker(); } else { object_end(); } } function table_(start, attrs) { if (start) { var css = split_css(attrs.style); var draw = object_start(factory.table(attrs, css)); return stl.handler_dispatcher(nsmap, table_builder(draw)); } else { object_end(); } } return { story_: story_, text_: text_, image_: image_, table_: table_, barcode_: () => unsupported("stl:barcode"), chart_: () => unsupported("stl:chart"), fragment_: () => unsupported("stl:fragment"), text: unexpected_text, finalize: () => {} }; } function doc_builder() { function story_(start, attrs) { if (start) { if (root.contents) return 
unsupported("multiple stl:story"); var contents = factory.content(root, 4, attrs); return stl.handler_dispatcher(nsmap, story_builder(contents.m_pTextDraw)); } } function page_(start, attrs) { if (start) { if (root.contents) return unsupported("multiple stl:page"); var contents = factory.canvas(root, 1, attrs); return stl.handler_dispatcher(nsmap, item_builder(contents.m_DrawFront)); } } return { story_: story_, page_: page_, text: unexpected_text, finalize: () => {} }; } function root_builder() { function document_(start, attrs) { if (start) return stl.handler_dispatcher(nsmap, doc_builder()); } return { stl_: () => {}, data_: () => unsupported("stl:data"), fixtures_: () => unsupported("stl:fixtures"), style_: () => unsupported("stl:style"), document_: document_, text: unexpected_text, finalize: () => {} }; } return root_builder(); } ///////////////////////////////////////////////////////////////////////////////////////////////////////// /* * emp2stl( src: stream [, dst: stream, options: object] ) : stream * * Parses _Empower JSON_ fragment and generates corresponding * _STL_ fragment * * Parameters: * - `src` ... input stream containing _Empower JSON_ * - `dst` ... output stream to be filled with resulting _STL_ (memory stream is created by default) * - `options` ... following options are currently supported: * - `indent` ... bool, string or a function(tag, tags, is_start) used for indentation * - `page` ... bool determining whether page type should be generated * - `fonts` ... optional callback for font remap * - `uris` ... optional callback for URI remap * - `@return` ... 
output stream (the `dst` argument if provided, temporary memory stream otherwise) */ exports.emp2stl = function emp2stl(src, dst, options) { dst = dst || streams.stream(); options = options || {}; var indenter = options.indent; options.indent if (!util.isStream(src) || !util.isStream(dst)) { throw new Error("Invalid argument, stream expected"); } var contents = JSON.parse(src.read()).contents; var writer = stl_writer(dst, options.indent); build_stl(contents, writer, options); writer.finalize(); return dst; }; /* * stl2emp( src: stream [, dst: stream, options: object] ) : stream * * Parses _STL_ document and generates corresponding _Empower JSON_ fragment * * Parameters: * - `src` ... input stream containing _STL_ * - `dst` ... output stream to be filled with resulting _Empower JSON_ (memory stream is created by default) * - `options` ... following options are currently supported: * - `indent` ... bool or a string used for indentation * - `permissive` ... determines whether the conversion fails or ignores unsupported constructs * - `fonts` ... optional callback for font remap * - `uris` ... optional callback for URI remap * - `@return` ... output stream (the `dst` argument if provided, temporary memory stream otherwise) */ exports.stl2emp = function emp2stl(src, dst, options) { options = options || {}; var nsmap = stl.namespace_stack(); var factory = json_factory(options); var root = factory.root(); var builder = json_builder(nsmap, factory, root, options); var parser = stl.parser(nsmap, builder); parser.write(src.read()).close(); dst.write(JSON.stringify(root, null, options.indent)); return dst; };
Cleanup empower.js
docplatform/forsetup/js/tools/empower.js
Cleanup empower.js
<ide><path>ocplatform/forsetup/js/tools/empower.js <ide> exports.emp2stl = function emp2stl(src, dst, options) { <ide> dst = dst || streams.stream(); <ide> options = options || {}; <del> var indenter = options.indent; <del> options.indent <ide> <ide> if (!util.isStream(src) || !util.isStream(dst)) { <ide> throw new Error("Invalid argument, stream expected");
Java
apache-2.0
632a6724f26058b4a6919ecd241d7fdd2b8f0d13
0
jpkrohling/hawkular-inventory,metlos/hawkular-inventory,hawkular/hawkular-inventory,jpkrohling/hawkular-inventory,metlos/hawkular-inventory,hawkular/hawkular-inventory
/* * Copyright 2015-2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.inventory.rest.test; import static org.hawkular.inventory.api.Relationships.WellKnown.contains; import static org.hawkular.inventory.api.Relationships.WellKnown.defines; import static org.hawkular.inventory.api.Relationships.WellKnown.incorporates; import static org.junit.Assert.assertEquals; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Spliterator; import java.util.Spliterators; import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; import org.hawkular.accounts.api.model.HawkularUser; import org.hawkular.accounts.api.model.Persona; import org.hawkular.inventory.api.model.Entity; import org.hawkular.inventory.api.model.Environment; import org.hawkular.inventory.api.model.Feed; import org.hawkular.inventory.api.model.Metric; import org.hawkular.inventory.api.model.MetricDataType; import org.hawkular.inventory.api.model.MetricType; import org.hawkular.inventory.api.model.MetricUnit; import org.hawkular.inventory.api.model.OperationType; import org.hawkular.inventory.api.model.Relationship; import 
org.hawkular.inventory.api.model.Resource; import org.hawkular.inventory.api.model.ResourceType; import org.hawkular.inventory.paths.CanonicalPath; import org.hawkular.inventory.paths.PathSegmentCodec; import org.hawkular.inventory.paths.SegmentType; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.squareup.okhttp.Response; /** * @author <a href="https://github.com/ppalaga">Peter Palaga</a> * */ public class InventoryITest extends AbstractTestBase { protected static final String basePath = "/hawkular/inventory"; protected static final String urlTypeId = "URL"; protected static final String testEnvId = "test"; protected static final String environmentId = "itest-env-" + UUID.randomUUID().toString(); protected static final String pingableHostRTypeId = "itest-pingable-host-" + UUID.randomUUID().toString(); protected static final String roomRTypeId = "itest-room-type-" + UUID.randomUUID().toString(); protected static final String copyMachineRTypeId = "itest-copy-machine-type-" + UUID.randomUUID().toString(); protected static final String date20150626 = "2015-06-26"; protected static final String date20160801 = "2016-08-01"; protected static final String expectedLifetime15years = "15y"; protected static final String facilitiesDept = "Facilities"; protected static final String itDept = "IT"; protected static final String typeVersion = "1.0"; protected static final String responseTimeMTypeId = "itest-response-time-" + UUID.randomUUID().toString(); protected static final String responseStatusCodeMTypeId = "itest-response-status-code-" + UUID.randomUUID().toString(); protected static final String statusDurationMTypeId = "status.duration.type"; protected static final String statusCodeMTypeId = "status.code.type"; protected 
static final String host1ResourceId = "itest-host1-" + UUID.randomUUID().toString(); protected static final String host2ResourceId = "itest-host2-" + UUID.randomUUID().toString(); protected static final String room1ResourceId = "itest-room1-" + UUID.randomUUID().toString(); protected static final String copyMachine1ResourceId = "itest-copy-machine1-" + UUID.randomUUID().toString(); protected static final String copyMachine2ResourceId = "itest-copy-machine2-" + UUID.randomUUID().toString(); protected static final String responseTimeMetricId = "itest-response-time-" + host1ResourceId; protected static final String responseStatusCodeMetricId = "itest-response-status-code-" + host1ResourceId; protected static final String feedId = "itest-feed-" + UUID.randomUUID().toString(); protected static final String bulkResourcePrefix = "bulk-resource-" + UUID.randomUUID().toString(); protected static final String bulkResourceTypePrefix = "bulk-resource-type-" + UUID.randomUUID().toString(); protected static final String bulkMetricTypePrefix = "bulk-metric-type-" + UUID.randomUUID().toString(); protected static final String customRelationName = "inTheSameRoom"; /* key is the path to delete while value is the path to GET to verify the deletion */ protected static Map<String, String> pathsToDelete = new LinkedHashMap<>(); protected static String tenantId; @BeforeClass public static void setupData() throws Throwable { Persona persona = getWithRetries("/hawkular/accounts/personas/current", HawkularUser.class, 10, 2000); tenantId = persona.getIdAsUUID().toString(); Assert.assertTrue("Cannot get the current tenant id.", tenantId != null && !tenantId.trim().isEmpty()); /* assert the test environment exists */ /* There is a race condition when WildFly agent is enabled: both this test and Agent trigger the autocreation of test entities simultaneously, and one of them may get only a partially initialized state. That is why we do several delayed attempts do perform the first request. 
*/ String path = "/hawkular/inventory/environments/" + testEnvId; Environment env = getWithRetries(path, Environment.class, 10, 2000); assertEquals("Unable to get the '" + testEnvId + "' environment.", testEnvId, env.getId()); /* Create an environment that will be used exclusively by this test */ Response response = postDeletable("environments", Environment.Blueprint.builder().withId(environmentId).build()); assertEquals(201, response.code()); Environment environment = mapper.readValue(response.body().string(), Environment.class); assertEquals(environmentId, environment.getId()); assertEquals(CanonicalPath.of().tenant(tenantId).environment(environmentId).get(), environment.getPath()); assertEquals(baseURI + basePath + "/environments/" + environmentId, response.headers().get("Location")); /* URL resource type should have been autocreated */ path = basePath + "/resourceTypes/" + urlTypeId; ResourceType resourceType = getWithRetries(path, ResourceType.class, 10, 2000); assertEquals("Unable to get the '" + urlTypeId + "' resource type.", urlTypeId, resourceType.getId()); assertEquals(urlTypeId, resourceType.getId()); /* Create pingable host resource type */ response = postDeletable("resourceTypes", ResourceType.Blueprint.builder().withId(pingableHostRTypeId).build()); assertEquals(201, response.code()); ResourceType pingableHost = mapper.readValue(response.body().string(), ResourceType.class); assertEquals(pingableHostRTypeId, pingableHost.getId()); assertEquals(baseURI + basePath +"/resourceTypes/" + pingableHostRTypeId, response.headers().get("Location")); /* Create room resource type */ response = postDeletable("resourceTypes", ResourceType.Blueprint.builder().withId(roomRTypeId) .withProperty("expectedLifetime", expectedLifetime15years)// .withProperty("ownedByDepartment", facilitiesDept).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/resourceTypes/" + roomRTypeId, response.headers().get("Location")); /* Create copy machine 
resource type */ response = postDeletable("resourceTypes", ResourceType.Blueprint.builder().withId(copyMachineRTypeId) .withProperty("expectedLifetime", expectedLifetime15years)// .withProperty("ownedByDepartment", itDept).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/resourceTypes/" + copyMachineRTypeId, response.headers().get("Location")); /* Create a metric type */ response = postDeletable("metricTypes", MetricType.Blueprint.builder(MetricDataType.COUNTER) .withId(responseTimeMTypeId)// .withUnit(MetricUnit.MILLISECONDS)// .withInterval(1L)// .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/metricTypes/" + responseTimeMTypeId, response.headers().get("Location")); /* Create another metric type */ response = postDeletable("metricTypes", MetricType.Blueprint.builder(MetricDataType.GAUGE) .withId(responseStatusCodeMTypeId)// .withUnit(MetricUnit.NONE)// .withInterval(1L)// .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/metricTypes/" + responseStatusCodeMTypeId, response.headers().get("Location")); /* link pingableHostRTypeId with responseTimeMTypeId and responseStatusCodeMTypeId */ path = basePath + "/resourceTypes/" + pingableHostRTypeId +"/metricTypes"; //just testing that both relative and canonical paths work when referencing the types response = post(path, "[\"../" + responseTimeMTypeId +"\", \"/" + responseStatusCodeMTypeId +"\"]"); assertEquals(204, response.code()); //we will try deleting the associations between resource types and metric types, too //this is not necessary because deleting either the resource type or the metric type will take care of it anyway //but this is to test that explicit deletes work, too // XXX this should check for removal of a single association. // OkHttp unconditionally canonicalizes the URL paths, which makes the below constructs impossible to send // over the wire using OkHttp (even though they're perfectly valid URLs). 
//pathsToDelete.put(path + "/../" + responseTimeMTypeId, path +"/../" + responseTimeMTypeId); // XXX again, this is impossible due to OkHttp unconditionally canonicalizing the URL paths //pathsToDelete.put(path + "/../" + responseStatusCodeMTypeId, path +"/../" + responseStatusCodeMTypeId); /* add a metric */ response = postDeletable(environmentId +"/metrics", Metric.Blueprint.builder() .withId(responseTimeMetricId) // .withMetricTypePath("../" + responseTimeMTypeId) // .build()); //path relative to env assertEquals(201, response.code()); Metric responseTimeMetric = mapper.readValue(response.body().string(), Metric.class); assertEquals(responseTimeMetricId, responseTimeMetric.getId()); assertEquals(baseURI + basePath +"/" + environmentId + "/metrics/" + responseTimeMetricId, response.headers().get("Location")); /* add another metric */ response = postDeletable(environmentId +"/metrics", Metric.Blueprint.builder() .withId(responseStatusCodeMetricId) // .withMetricTypePath("/" + responseStatusCodeMTypeId) // .build()); assertEquals(201, response.code()); Metric responseStatusCode = mapper.readValue(response.body().string(), Metric.class); assertEquals(responseStatusCodeMetricId, responseStatusCode.getId()); assertEquals(baseURI + basePath +"/" + environmentId + "/metrics/" + responseStatusCodeMetricId, response.headers().get("Location")); /* add a resource */ response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder() // .withId(host1ResourceId) // .withResourceTypePath("../" + pingableHostRTypeId) // .build()); assertEquals(201, response.code()); Resource host1Resource = mapper.readValue(response.body().string(), Resource.class); assertEquals(host1ResourceId, host1Resource.getId()); assertEquals(CanonicalPath.of().tenant(tenantId).environment(environmentId). 
resource(host1ResourceId).get(), host1Resource.getPath()); assertEquals(CanonicalPath.of().tenant(tenantId).resourceType(pingableHostRTypeId).get(), host1Resource.getType().getPath()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + host1ResourceId, response.headers().get("Location")); /* add another resource */ response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder()// .withId(host2ResourceId)// .withResourceTypePath("../" + pingableHostRTypeId)// .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + host2ResourceId, response.headers().get("Location")); /* add a room resource */ response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder().withId(room1ResourceId).withResourceTypePath("../" + roomRTypeId) .withProperty("purchaseDate", date20150626).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId, response.headers().get("Location")); /* add a copy machine resource */ response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder() // .withId(copyMachine1ResourceId) // .withResourceTypePath("../" + copyMachineRTypeId)// .withProperty("purchaseDate", date20150626)// .withProperty("nextMaintenanceDate", date20160801)// .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + copyMachine1ResourceId, response.headers().get("Location")); response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder() // .withId(copyMachine2ResourceId) // .withResourceTypePath("../" + copyMachineRTypeId) // .withProperty("purchaseDate", date20160801) // .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + copyMachine2ResourceId, response.headers().get("Location")); /* add child resources */ response = 
postDeletable(environmentId + "/resources/" + room1ResourceId, Resource.Blueprint.builder().withId("table").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId +"/table", response.headers().get("Location")); response = postDeletable(environmentId + "/resources/" + room1ResourceId +"/table", Resource.Blueprint.builder().withId("leg/1").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId +"/table/leg%2F1", response.headers().get("Location")); response = postDeletable(environmentId + "/resources/" + room1ResourceId +"/table", Resource.Blueprint.builder().withId("leg 2").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId +"/table/leg%202", response.headers().get("Location")); response = postDeletable(environmentId + "/resources/" + room1ResourceId +"/table", Resource.Blueprint.builder().withId("leg;3").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId +"/table/leg;3", response.headers().get("Location")); response = postDeletable(environmentId + "/resources/" + room1ResourceId +"/table", Resource.Blueprint.builder().withId("leg-4").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId +"/table/leg-4", response.headers().get("Location")); //alternative child hierarchies response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder().withId("weapons").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + 
basePath +"/" + environmentId + "/resources/weapons", response.headers().get("Location")); path = basePath + "/" + environmentId + "/resources/weapons/children"; response = post(path, JsonNodeFactory.instance.arrayNode() // .add("/e;" + environmentId + "/r;" + room1ResourceId + "/r;table/r;leg%2F1") // .add("../" + room1ResourceId + "/table/leg-4") // .toString()); assertEquals(204, response.code()); // XXX again, this is impossible due to OkHttp unconditionally canonicalizing the URL paths // pathsToDelete.put(path + "/../table/leg%2F1", path + "/../table/leg%2F1") // pathsToDelete.put(path + "/../table/leg-4", path + "/../table/leg-4") /* link the metric to resource */ path = basePath + "/" + environmentId + "/resources/" + host1ResourceId +"/metrics"; response = post(path, JsonNodeFactory.instance.arrayNode() // .add("/e;"+ environmentId + "/m;"+ responseTimeMetricId) .add("/e;"+ environmentId + "/m;"+ responseStatusCodeMetricId) .toString()); assertEquals(204, response.code()); // XXX again, this is impossible due to OkHttp unconditionally canonicalizing the URL paths // pathsToDelete.put(path + "/../" + responseTimeMetricId, path + "/../" + responseTimeMetricId); // XXX again, this is impossible due to OkHttp unconditionally canonicalizing the URL paths //pathsToDelete.put(path + "/../" + responseStatusCodeMetricId, path + "/../" + responseStatusCodeMetricId); /* add a feed */ response = postDeletable("feeds", Feed.Blueprint.builder().withId(feedId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/feeds/" + feedId, response.headers().get("Location")); /* add a custom relationship, no need to clean up, it'll be deleted together with the resources */ Map<String, Object> properties = new LinkedHashMap<>(); properties.put("from", "2000-01-01"); properties.put("confidence", "90%"); CanonicalPath src = CanonicalPath.fromString("/t;" + tenantId + "/e;" + environmentId + "/r;" + host2ResourceId); CanonicalPath target = 
CanonicalPath.fromString("/t;" + tenantId + "/e;" + environmentId + "/r;" + host1ResourceId); Relationship h1h2Rel = new Relationship("42", customRelationName, src, target, properties); response = postNew(basePath + "/" + environmentId +"/resources/" + host2ResourceId +"/relationships", h1h2Rel); assertEquals(201, response.code()); JsonNode h1h2Json = mapper.readTree(response.body().string()); assertEquals(customRelationName, h1h2Json.get("name").asText()); // relationship with tenant CanonicalPath tenantPath = CanonicalPath.fromString("/t;" + tenantId); Relationship tenantRel = new Relationship(UUID.randomUUID().toString(), "sampleRelationship", tenantPath, tenantPath); post(basePath + "/tenants/relationships", mapper.writeValueAsString(tenantRel)); assertEquals(201, response.code()); // add operation type to the resource type response = postDeletable("resourceTypes/" + pingableHostRTypeId +"/operationTypes", OperationType.Blueprint.builder().withId("start").build()); assertEquals(201, response.code()); response = postDeletable("resourceTypes/" + pingableHostRTypeId +"/operationTypes", OperationType.Blueprint.builder().withId("stop").build()); assertEquals(201, response.code()); // add some parameters to it String startOpParamTypes = "{" // + "\"role\" : \"parameterTypes\"," // + "\"value\": {" // + "\"title\" : \"blah\"," // + "\"type\": \"object\"," // + "\"properties\": { \"quick\": { \"type\": \"boolean\"}}" // + "}" // + "}"; response = post(basePath + "/resourceTypes/" + pingableHostRTypeId +"/operationTypes/start/data", startOpParamTypes); assertEquals(201, response.code()); response = post(basePath + "/resourceTypes/" + pingableHostRTypeId +"/operationTypes/start/data", "{\"role\": \"returnType\", \"value\": {\"title\": \"blah\", \"type\": \"boolean\"}}"); assertEquals(201, response.code()); /* add a resource type json schema */ String schema = "{" + "\"value\": {" // + "\"title\" : \"Character\"," // + "\"type\" : \"object\"," // + "\"properties\": {" // 
+ "\"firstName\" : {\"type\": \"string\"}," // + "\"secondName\": {\"type\": \"string\"}," // + "\"age\" : {" // + "\"type\" : \"integer\"," // + "\"minimum\" : 0," // + "\"exclusiveMinimum\": false" // + "}," // + "\"male\" : {" // + "\"description\": \"true if the character is a male\"," // + "\"type\" : \"boolean\"" // + "}," // + "\"foo\" : {" // + "\"type\" : \"object\"," // + "\"properties\": {" // + "\"something\": {\"type\": \"string\"}," // + "\"someArray\": {" // + "\"type\" : \"array\"," // + "\"minItems\" : 3," // + "\"items\" : {\"type\": \"integer\"}," // + "\"uniqueItems\": false" // + "}," // + "\"foo\" : {" // + "\"type\" : \"object\"," // + "\"properties\": {" // + "\"question\": {" // + "\"type\" : \"string\"," // + "\"pattern\": \"^.*\\\\?$\"" // + "}," // + "\"answer\" : {" // + "\"description\": \"the answer (example of any type)\"" // + "}," // + "\"foo\" : {" // + "\"type\" : \"object\"," // + "\"properties\": {" // + "\"foo\": {" // + "\"type\" : \"object\"," // + "\"properties\": {" // + "\"fear\" : {" // + "\"type\": \"string\"," // + "\"enum\": [\"dentists\", \"lawyers\", \"rats\"]" // + "}" // + "}" // + "}" // + "}" // + "}" // + "}" // + "}" // + "}" // + "}" // + "}," // + "\"required\" : [\"firstName\", \"secondName\", \"male\", \"age\", \"foo\"]" // + "}," // + "\"role\" : \"configurationSchema\"" // + "}"; response = post(basePath + "/resourceTypes/" + pingableHostRTypeId +"/data", schema); assertEquals(201, response.code()); /* add an invalid config data to a resource (invalid ~ not valid against the json schema) */ String invalidData = "{" // + "\"value\": {" // + "\"firstName\": \"John\"," // + "\"secondName\": \"Smith\"" // + "}," // + "\"role\" : \"configuration\"" // + "}"; response = post(basePath + "/" + environmentId +"/resources/" + host2ResourceId +"/data", invalidData); assertEquals(400, response.code()); /* add a config data to a resource, no need to clean up, it'll be deleted together with the resources */ String 
data = "{" //
                + "\"value\" : {" //
                + "\"firstName\" : \"Winston\"," //
                + "\"secondName\": \"Smith\"," //
                + "\"sdf\" : \"sdf\"," //
                + "\"male\" : true," //
                + "\"age\" : 42," //
                + "\"foo\" : {" //
                + "\"something\": \"whatever\"," //
                + "\"someArray\": [1, 1, 2, 3, 5, 8]," //
                + "\"foo\" : {" //
                + "\"answer\" : 5," //
                + "\"question\": \"2+2=?\"," //
                + "\"foo\" : {" //
                + "\"foo\": {" //
                + "\"fear\": \"rats\"" //
                + "}" //
                + "}" //
                + "}" //
                + "}" //
                + "}," //
                + "\"role\" : \"configuration\"," //
                + "\"properties\": {" //
                + "\"war\" : \"peace\"," //
                + "\"freedom\" : \"slavery\"," //
                + "\"ignorance\": \"strength\"" //
                + "}" //
                + "}";
        // this document conforms to the schema registered above, so the POST must succeed
        response = post(basePath + "/" + environmentId + "/resources/" + host2ResourceId + "/data", data);
        assertEquals(201, response.code());

        // add a metric created directly under the resource (contained in it) rather than under the environment
        response = postDeletable(environmentId + "/resources/" + host2ResourceId + "/metrics",
                Metric.Blueprint.builder() //
                        .withId("resource-owned-metric") //
                        .withMetricTypePath("/" + responseTimeMTypeId) //
                        .build());
        assertEquals(201, response.code());
        assertEquals(baseURI + basePath + "/" + environmentId + "/resources/" + host2ResourceId
                + "/metrics/resource-owned-metric", response.headers().get("Location"));
    }

    /**
     * Deletes every entity recorded in {@code pathsToDelete} during setup, in reverse creation order.
     */
    @AfterClass
    public static void deleteEverything() throws IOException {
        /* the following would delete all data of the present user. We cannot do that as long as we do not have
         * a dedicated user for running this very single test class.
*/
        // Response response = client.delete(path : basePath + "/tenant")
        // assertEquals(204, response.code())

        /* Let's delete the entities one after another in the reverse order as we created them */
        List<Map.Entry<String, String>> entries =
                new ArrayList<Map.Entry<String, String>>(pathsToDelete.entrySet());
        Collections.reverse(entries);
        for (Map.Entry<String, String> en : entries) {
            String path = en.getKey();
            String getValidationPath = en.getValue();
            Response response = client.newCall(newAuthRequest().url(baseURI + path).delete().build()).execute();
            assertEquals("Could not delete path [" + baseURI + path + "]: " + response.body().string(), 204,
                    response.code());
            if (getValidationPath != null) {
                // a successful DELETE must make the entity unreachable via GET
                // NOTE(review): the verification GET uses `path`, not `getValidationPath`, although the failure
                // message refers to `getValidationPath` — the two appear to hold the same value for current
                // entries, but confirm `getValidationPath` was not the intended URL here.
                response = client.newCall(newAuthRequest().url(baseURI + path).build()).execute();
                assertEquals("The path " + getValidationPath + " should not exist after the entity was deleted: "
                        + response.body().string(), 404, response.code());
            }
        }
    }

    /** Sanity check: the inventory REST root endpoint is up and answers a plain GET with 200. */
    @Test
    public void ping() throws Throwable {
        Response response = get(basePath + "");
        assertEquals(200, response.code());
    }

    /** Both the pre-existing test environment and the one created in setup must be listed. */
    @Test
    public void testEnvironmentsCreated() throws Throwable {
        assertEntitiesExist("environments", "/e;" + testEnvId, "/e;" + environmentId);
    }

    @Test
    public void testResourceTypesCreated() throws Throwable {
        assertEntityExists("resourceTypes/" + urlTypeId, "/rt;" + urlTypeId);
        assertEntityExists("resourceTypes/" + pingableHostRTypeId, "/rt;" + pingableHostRTypeId);
        assertEntityExists("resourceTypes/" + roomRTypeId, "/rt;" + roomRTypeId);
        // commented out as it interferes with WildFly Agent
        // assertEntitiesExist("resourceTypes", [urlTypeId, pingableHostRTypeId, roomRTypeId])
    }

    @Test
    public void testMetricTypesCreated() throws Throwable {
        assertEntityExists("metricTypes/" + responseTimeMTypeId, "/mt;" + responseTimeMTypeId);
        assertEntityExists("metricTypes/" + statusDurationMTypeId, "/mt;" + statusDurationMTypeId);
        assertEntityExists("metricTypes/" + statusCodeMTypeId, "/mt;" + statusCodeMTypeId);
        // commented out as it interferes with WildFly Agent
        // assertEntitiesExist("metricTypes",
        // [responseTimeMTypeId, responseStatusCodeMTypeId, statusDurationMTypeId, statusCodeMTypeId])
    }

    /** The "start"/"stop" operation types and the parameter-/return-type data of "start" must exist. */
    @Test
    public void testOperationTypesCreated() throws Throwable {
        Response response = get(basePath + "/resourceTypes/" + pingableHostRTypeId + "/operationTypes");
        JsonNode json = mapper.readTree(response.body().string());
        assertEquals(2, json.size());
        assertEntityExists("resourceTypes/" + pingableHostRTypeId + "/operationTypes/start",
                "/rt;" + pingableHostRTypeId + "/ot;start");
        assertEntityExists("resourceTypes/" + pingableHostRTypeId + "/operationTypes/start/data",
                new String[] {"dataType", "returnType"},
                "/rt;" + pingableHostRTypeId + "/ot;start/d;returnType");
        assertEntityExists("resourceTypes/" + pingableHostRTypeId + "/operationTypes/start/data",
                new String[] { "dataType", "parameterTypes" },
                "/rt;" + pingableHostRTypeId + "/ot;start/d;parameterTypes");
    }

    /** Both metric types linked to the pingable-host resource type in setup must be listed. */
    @Test
    public void testMetricTypesLinked() throws Throwable {
        assertEntitiesExist("resourceTypes/" + pingableHostRTypeId + "/metricTypes",
                "/mt;" + responseTimeMTypeId, "/mt;" + responseStatusCodeMTypeId);
    }

    @Test
    public void testResourcesCreated() throws Throwable {
        assertEntityExists(environmentId + "/resources/" + host1ResourceId,
                "/e;" + environmentId + "/r;" + host1ResourceId);
        assertEntityExists(environmentId + "/resources/" + host2ResourceId,
                "/e;" + environmentId + "/r;" + host2ResourceId);
        assertEntityExists(environmentId + "/resources/" + room1ResourceId,
                "/e;" + environmentId + "/r;" + room1ResourceId);
    }

    /** Exercises resource queries filtered by property value, property existence and resource type. */
    @Test
    public void testResourcesFilters() throws Throwable {
        /* filter by resource properties */
        Response response = get(basePath + "/" + environmentId + "/resources",
                "properties", "purchaseDate:" + date20150626, "sort", "id");
        JsonNode json = mapper.readTree(response.body().string());
        assertEquals(2, json.size());
        assertEquals(copyMachine1ResourceId, json.get(0).get("id").asText());
        assertEquals(room1ResourceId,
json.get(1).get("id").asText());

        response = get(basePath + "/" + environmentId + "/resources",
                "properties", "nextMaintenanceDate:" + date20160801);
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(copyMachine1ResourceId, json.get(0).get("id").asText());

        /* query by two props at once */
        response = get(basePath + "/" + environmentId + "/resources",
                "properties", "nextMaintenanceDate:" + date20160801 + ",purchaseDate:" + date20150626);
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(copyMachine1ResourceId, json.get(0).get("id").asText());

        /* query by property existence */
        response = get(basePath + "/" + environmentId + "/resources", "properties", "purchaseDate", "sort", "id");
        json = mapper.readTree(response.body().string());
        assertEquals(3, json.size());
        assertEquals(copyMachine1ResourceId, json.get(0).get("id").asText());
        assertEquals(copyMachine2ResourceId, json.get(1).get("id").asText());
        assertEquals(room1ResourceId, json.get(2).get("id").asText());

        /* filter by type */
        response = get(basePath + "/" + environmentId + "/resources", "type.id", pingableHostRTypeId);
        json = mapper.readTree(response.body().string());
        assertEquals(2, json.size());

        response = get(basePath + "/" + environmentId + "/resources",
                "type.id", roomRTypeId, "type.version", typeVersion);
        json = mapper.readTree(response.body().string());
        assertEquals(2, json.size());
    }

    /** Both environment-owned metrics and the resource-owned metric created in setup must be listed. */
    @Test
    public void testMetricsCreated() throws Throwable {
        assertEntityExists(environmentId + "/metrics/" + responseTimeMetricId,
                "/e;" + environmentId + "/m;" + responseTimeMetricId);
        assertEntityExists(environmentId + "/metrics/" + responseStatusCodeMetricId,
                "/e;" + environmentId + "/m;" + responseStatusCodeMetricId);
        assertEntitiesExist(environmentId + "/metrics",
                "/e;" + environmentId + "/m;" + responseTimeMetricId,
                "/e;" + environmentId + "/m;" + responseStatusCodeMetricId,
                "/e;" + environmentId + "/r;" + host2ResourceId + "/m;resource-owned-metric");
    }

    @Test
public void testMetricsLinked() throws Throwable {
        assertEntitiesExist(environmentId + "/resources/" + host1ResourceId + "/metrics",
                "/e;" + environmentId + "/m;" + responseTimeMetricId,
                "/e;" + environmentId + "/m;" + responseStatusCodeMetricId);
    }

    /** The configuration data entity stored on host2 in setup must exist. */
    @Test
    public void testConfigCreated() throws Throwable {
        assertEntityExists(environmentId + "/resources/" + host2ResourceId + "/data",
                "/e;" + environmentId + "/r;" + host2ResourceId + "/d;configuration");
        // assertEntitiesExist(environmentId +"/resources/"
        // + host2ResourceId%2Ftable/data?dataType=connectionConfiguration",
        // ["/e;" + environmentId + "/r;" + host2ResourceId + "/d;connectionConfiguration"])
    }

    /**
     * Pages through the pingable-host resources one item at a time, in ascending and descending
     * order, and checks each page against the items of an initial two-item page.
     */
    @Test
    public void testPaging() throws Throwable {
        String path = basePath + "/" + environmentId + "/resources";
        Response response = get(path, "type.id", pingableHostRTypeId, "page", "0", "per_page", "2", "sort", "id");
        JsonNode json = mapper.readTree(response.body().string());
        assertEquals(2, json.size());
        JsonNode first = json.get(0);
        JsonNode second = json.get(1);

        response = get(path, "type.id", pingableHostRTypeId, "page", "0", "per_page", "1", "sort", "id");
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(first, json.get(0));

        response = get(path, "type.id", pingableHostRTypeId, "page", "1", "per_page", "1", "sort", "id");
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(second, json.get(0));

        // descending order must flip which item appears on which page
        response = get(path, "type.id", pingableHostRTypeId, "page", "0", "per_page", "1", "sort", "id",
                "order", "desc");
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(second, json.get(0));

        response = get(path, "type.id", pingableHostRTypeId, "page", "1", "per_page", "1", "sort", "id",
                "order", "desc");
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(first, json.get(0));
    }

    /** The tenant must have a "contains" relationship to the environment created in setup. */
    @Test
    public void testTenantsContainEnvironments() throws Throwable {
assertRelationshipExists("tenant/relationships", "/t;" + tenantId + "", contains.name(),
                "/t;" + tenantId + "/e;" + environmentId + "");
        assertRelationshipJsonldExists("tenant/relationships", tenantId, contains.name(), environmentId);
    }

    @Test
    public void testTenantsContainResourceTypes() throws Throwable {
        assertRelationshipExists("resourceTypes/" + urlTypeId + "/relationships", "/t;" + tenantId + "",
                contains.name(), "/t;" + tenantId + "/rt;" + urlTypeId + "");
        assertRelationshipExists("tenant/relationships", "/t;" + tenantId + "", contains.name(),
                "/t;" + tenantId + "/rt;" + pingableHostRTypeId);
    }

    @Test
    public void testTenantsContainMetricTypes() throws Throwable {
        assertRelationshipExists("metricTypes/" + responseTimeMTypeId + "/relationships", "/t;" + tenantId + "",
                contains.name(), "/t;" + tenantId + "/mt;" + responseTimeMTypeId);
        assertRelationshipExists("tenant/relationships", "/t;" + tenantId + "", contains.name(),
                "/t;" + tenantId + "/mt;" + statusCodeMTypeId);
    }

    /** The environment must "contains"-relate to both host resources, checked from both endpoints. */
    @Test
    public void testEnvironmentsContainResources() throws Throwable {
        assertRelationshipExists("environments/" + environmentId + "/relationships",
                "/t;" + tenantId + "/e;" + environmentId + "", contains.name(),
                "/t;" + tenantId + "/e;" + environmentId + "/r;" + host2ResourceId + "");
        assertRelationshipExists("environments/" + environmentId + "/relationships",
                "/t;" + tenantId + "/e;" + environmentId + "", contains.name(),
                "/t;" + tenantId + "/e;" + environmentId + "/r;" + host1ResourceId + "");
        assertRelationshipJsonldExists("environments/" + environmentId + "/relationships", environmentId,
                contains.name(), host1ResourceId);
        assertRelationshipJsonldExists("environments/" + environmentId + "/relationships", environmentId,
                contains.name(), host2ResourceId);
    }

    @Test
    public void testTenantsContainFeeds() throws Throwable {
        assertRelationshipExists("feeds/" + feedId + "/relationships", "/t;" + tenantId + "", contains.name(),
                "/t;" + tenantId + "/f;" + feedId + "");
        assertRelationshipJsonldExists("feeds/" + feedId + "/relationships",
tenantId, contains.name(), feedId);
    }

    @Test
    public void testEnvironmentsContainMetrics() throws Throwable {
        assertRelationshipExists("environments/" + environmentId + "/relationships",
                "/t;" + tenantId + "/e;" + environmentId + "", contains.name(),
                "/t;" + tenantId + "/e;" + environmentId + "/m;" + responseTimeMetricId + "");
        assertRelationshipExists("environments/" + environmentId + "/relationships",
                "/t;" + tenantId + "/e;" + environmentId + "", contains.name(),
                "/t;" + tenantId + "/e;" + environmentId + "/m;" + responseStatusCodeMetricId + "");
        assertRelationshipJsonldExists("environments/" + environmentId + "/relationships", environmentId,
                contains.name(), responseTimeMetricId);
        assertRelationshipJsonldExists("environments/" + environmentId + "/relationships", environmentId,
                contains.name(), responseStatusCodeMetricId);
    }

    /** The "incorporates" links between the resource type and both metric types must be queryable. */
    @Test
    public void testResourceTypesIncorporatesMetricTypes() throws Throwable {
        assertRelationshipExists("resourceTypes/" + pingableHostRTypeId + "/relationships",
                "/t;" + tenantId + "/rt;" + pingableHostRTypeId, incorporates.name(),
                "/t;" + tenantId + "/mt;" + responseTimeMTypeId);
        assertRelationshipExists("metricTypes/" + responseStatusCodeMTypeId + "/relationships",
                "/t;" + tenantId + "/rt;" + pingableHostRTypeId, incorporates.name(),
                "/t;" + tenantId + "/mt;" + responseStatusCodeMTypeId);
        assertRelationshipJsonldExists("resourceTypes/" + pingableHostRTypeId + "/relationships",
                pingableHostRTypeId, incorporates.name(), responseTimeMTypeId);
    }

    @Test
    public void testResourcesIncorporatesMetrics() throws Throwable {
        assertRelationshipExists(environmentId + "/resources/" + host1ResourceId + "/relationships",
                "/t;" + tenantId + "/e;" + environmentId + "/r;" + host1ResourceId + "", incorporates.name(),
                "/t;" + tenantId + "/e;" + environmentId + "/m;" + responseStatusCodeMetricId + "");
        assertRelationshipExists(environmentId + "/resources/" + host1ResourceId + "/relationships",
                "/t;" + tenantId + "/e;" + environmentId + "/r;" + host1ResourceId + "", incorporates.name(),
                "/t;" +
tenantId + "/e;" + environmentId + "/m;" + responseTimeMetricId + "");
        assertRelationshipJsonldExists(environmentId + "/resources/" + host1ResourceId + "/relationships",
                host1ResourceId, incorporates.name(), responseTimeMetricId);
    }

    @Test
    public void testResourceTypesDefinesResources() throws Throwable {
        assertRelationshipExists("resourceTypes/" + pingableHostRTypeId + "/relationships",
                "/t;" + tenantId + "/rt;" + pingableHostRTypeId, defines.name(),
                "/t;" + tenantId + "/e;" + environmentId + "/r;" + host2ResourceId + "");
    }

    @Test
    public void testMetricTypesDefinesMetrics() throws Throwable {
        assertRelationshipJsonldExists("metricTypes/" + responseStatusCodeMTypeId + "/relationships",
                responseStatusCodeMTypeId, defines.name(), responseStatusCodeMetricId);
        assertRelationshipJsonldExists("metricTypes/" + responseTimeMTypeId + "/relationships",
                responseTimeMTypeId, defines.name(), responseTimeMetricId);
    }

    /** The custom relationship created between host2 and host1 in setup must be retrievable. */
    @Test
    public void testCustomRelationship() throws Throwable {
        assertRelationshipJsonldExists(environmentId + "/resources/" + host2ResourceId + "/relationships",
                host2ResourceId, customRelationName, host1ResourceId);
    }

    /** Relationship queries filtered by property name/value pairs and by relationship name. */
    @Test
    public void testRelationshipFiltering() throws Throwable {
        assertRelationshipExists(environmentId + "/resources/" + host2ResourceId + "/relationships",
                "/t;" + tenantId + "/e;" + environmentId + "/r;" + host2ResourceId + "", customRelationName,
                "/t;" + tenantId + "/e;" + environmentId + "/r;" + host1ResourceId + "",
                "property", "from", "propertyValue", "2000-01-01");
        assertRelationshipExists(environmentId + "/resources/" + host2ResourceId + "/relationships",
                "/t;" + tenantId + "/e;" + environmentId + "/r;" + host2ResourceId + "", customRelationName,
                "/t;" + tenantId + "/e;" + environmentId + "/r;" + host1ResourceId + "",
                "property", "confidence", "propertyValue", "90%");
        assertRelationshipExists(environmentId + "/resources/" + host2ResourceId + "/relationships",
                "/t;" + tenantId + "/e;" + environmentId + "/r;" + host2ResourceId + "", customRelationName,
                "/t;" + tenantId + "/e;" +
environmentId + "/r;"+ host1ResourceId + "", "named", customRelationName); } @Test public void testResourceHierarchyQuerying() throws Throwable { assertEntitiesExist(environmentId +"/resources/" + room1ResourceId +"/children", "/e;"+ environmentId + "/r;"+ room1ResourceId + "/r;table"); String base = "/e;"+ environmentId + "/r;"+ room1ResourceId + "/r;table"; assertEntitiesExist(environmentId +"/resources/" + room1ResourceId +"/table/children", base + "/r;leg%2F1", base + "/r;leg%202", base + "/r;leg;3", base + "/r;leg-4"); assertEntitiesExist(environmentId +"/resources/weapons/children", "/e;"+ environmentId + "/r;"+ room1ResourceId + "/r;table/r;leg%2F1", "/e;"+ environmentId + "/r;"+ room1ResourceId + "/r;table/r;leg-4"); } @Test @Ignore public void testResourceBulkCreate() throws Throwable { StringBuilder payload = new StringBuilder("{\"/e;test\": {\"resource\": ["); for (int i = 0; i < 100; i++) { payload.append("{ \"id\": \"" + bulkResourcePrefix + "-" + i + "\", \"resourceTypePath\": \"/rt;" + roomRTypeId + "\"}"); if (i != 0) { payload.append(","); } } payload.append("]}}"); Response response = post(basePath + "/bulk", payload.toString()); assertEquals(201, response.code()); JsonNode json = mapper.readTree(response.body().string()); assertEquals(100, json.size()); for (Iterator<Entry<String, JsonNode>> it = json.fields(); it.hasNext(); ) { Entry<String, JsonNode> en = it.next(); CanonicalPath p = CanonicalPath.fromString(en.getKey()); String env = p.ids().getEnvironmentId(); String rid = p.ids().getResourcePath().getSegment().getElementId(); delete(basePath + "/" + env +"/resources/" + rid); } } @Test public void testResourceBulkCreateUnderFeedWithDuplicates() throws Throwable { String pathToResType = "/t;"+ tenantId + "/f;"+ feedId + "/rt;"+ bulkResourceTypePrefix + ".1"; String payload = "{" + " \"/t;"+ tenantId + "/f;"+ feedId + "\": {" // + " \"resourceType\": [" // + " {" // + " \"id\": \""+ bulkResourceTypePrefix +".1\"" // + " }," // + " {" // + " 
\"id\": \""+ bulkResourceTypePrefix +".1\"" // + " }" // + " ]," // + " \"resource\": [" // + " {" // + " \"id\" : \""+ bulkResourcePrefix + ".1\"," // + " \"resourceTypePath\": \""+ pathToResType + "\"" // + " }," // + " {" // + " \"id\" : \""+ bulkResourcePrefix + ".2\"," // + " \"resourceTypePath\": \""+ pathToResType + "\"" // + " }" // + " ]," // + " \"metricType\": [" // + " {" // + " \"id\" : \""+ bulkMetricTypePrefix + ".1\"," // + " \"unit\" : \"BYTES\"," // + " \"type\" : \"GAUGE\"," // + " \"collectionInterval\": \"300\"" // + " }," // + " {" // + " \"id\" : \""+ bulkMetricTypePrefix + ".2\"," // + " \"unit\" : \"BYTES\"," // + " \"type\" : \"GAUGE\"," // + " \"collectionInterval\": \"300\"" // + " }" // + " ]" // + " }," // + " \"" + pathToResType +"\": {" // + " \"relationship\": [" // + " {" // + " \"name\" : \"incorporates\"," // + " \"otherEnd\" : \"/t;"+ tenantId + "/f;"+ feedId + "/mt;"+ bulkMetricTypePrefix + ".1\"," // + " \"direction\": \"outgoing\"" // + " }," // + " {" // + " \"name\" : \"incorporates\"," // + " \"otherEnd\" : \"/t;"+ tenantId + "/f;"+ feedId + "/mt;"+ bulkMetricTypePrefix + ".1\"," // + " \"direction\": \"outgoing\"" // + " }," // + " {" // + " \"name\" : \"incorporates\"," // + " \"otherEnd\" : \"/t;"+ tenantId + "/f;"+ feedId + "/mt;"+ bulkMetricTypePrefix + ".2\"," // + " \"direction\": \"outgoing\"" // + " }" // + " ]" // + " }" // + "}"; Response response = post(basePath + "/bulk", payload); assertEquals(201, response.code()); JsonNode json = mapper.readTree(response.body().string()); JsonNode resourceCodes = json.get("resource"); JsonNode metricTypeCodes = json.get("metricType"); JsonNode resourceTypeCodes = json.get("resourceType"); JsonNode relationshipCodes = json.get("relationship"); // check, there are no dupes assertEquals(2, resourceCodes.size()); assertEquals(2, metricTypeCodes.size()); assertEquals(1, resourceTypeCodes.size()); assertEquals(2, relationshipCodes.size()); // check, no 409 was raised, because 
only the first status code is taken assertEquals(201, resourceCodes.get("/t;" + tenantId + "/f;" + feedId + "/r;" + bulkResourcePrefix + ".1") .asInt()); assertEquals(201, resourceCodes.get("/t;" + tenantId + "/f;" + feedId + "/r;" + bulkResourcePrefix + ".2") .asInt()); assertEquals(201, resourceTypeCodes.get(pathToResType).asInt()); assertEquals(201, metricTypeCodes.get("/t;" + tenantId + "/f;" + feedId + "/mt;" + bulkMetricTypePrefix + ".1").asInt()); assertEquals(201, metricTypeCodes.get("/t;" + tenantId + "/f;" + feedId + "/mt;" + bulkMetricTypePrefix + ".2").asInt()); assertEquals(201, relationshipCodes.get("/rl;" + PathSegmentCodec.encode(pathToResType + "-(incorporates)->" + "/t;"+ tenantId + "/f;"+ feedId + "/mt;"+ bulkMetricTypePrefix + "" + ".1")).asInt()); assertEquals(201, relationshipCodes.get("/rl;" + PathSegmentCodec.encode(pathToResType + "-(incorporates)->" + "/t;"+ tenantId + "/f;"+ feedId + "/mt;"+ bulkMetricTypePrefix + "" + ".2")).asInt()); delete(basePath + "/feeds/" + feedId +"/resources/" + bulkResourcePrefix + ".1"); delete(basePath + "/feeds/" + feedId +"/metricTypes/" + bulkMetricTypePrefix + ".1"); delete(basePath + "/feeds/" + feedId +"/metricTypes/" + bulkMetricTypePrefix + ".2"); // client.delete(path: basePath + "/feeds/" + feedId +"/resourceTypes/" + bulkResourceTypePrefix" + ".1"); } @Test public void testResourceBulkCreateWithErrors() throws Throwable { StringBuilder payload = new StringBuilder("{\"/e;" + environmentId + "\": {\"resource\": ["); //this should fail payload.append("{\"id\": \"" + room1ResourceId + "\", \"resourceTypePath\": \"/rt;" + roomRTypeId + "\"},"); //this should succeed payload.append("{\"id\": \"" + bulkResourcePrefix + "-1\", \"resourceTypePath\": \"/rt;" + roomRTypeId + "\"}"); payload.append("]}}"); Response response = post(basePath + "/bulk", payload.toString()); assertEquals(201, response.code()); JsonNode json = mapper.readTree(response.body().string()); JsonNode codes = json.get("resource"); 
assertEquals(2, codes.size()); assertEquals(409, codes.get("/t;" + tenantId + "/e;" + environmentId + "/r;" + room1ResourceId).asInt()); assertEquals(201, codes.get("/t;" + tenantId + "/e;" + environmentId + "/r;" + bulkResourcePrefix + "-1").asInt()); delete(basePath + "/" + environmentId +"/resources/" + bulkResourcePrefix + "-1"); } @Test public void testBulkCreateAndRelate() throws Throwable { String epath = "/t;"+ tenantId + "/e;"+ environmentId; String rpath = epath +"/r;" + bulkResourcePrefix + "-1"; String mpath = epath +"/m;"+ responseTimeMetricId + ""; String payload = "{" // + "\"" + epath + "\": {" // + "\"resource\": [" // + "{" // + "\"id\": \"" + bulkResourcePrefix + "-1\"," // + "\"resourceTypePath\": \"/rt;" + roomRTypeId + "\"" // + "}" // + "]" // + "}," + "\"" + rpath + "\": {" // + "\"relationship\" : [" // + "{" // + "\"name\": \"incorporates\"," // + "\"otherEnd\": \"" + mpath + "\"," // + "\"direction\": \"outgoing\"" // + "}" // + "]" // + "}" // + "}"; Response response = post(basePath + "/bulk", payload); assertEquals(201, response.code()); JsonNode json = mapper.readTree(response.body().string()); JsonNode resourceCodes = json.get("resource"); JsonNode relationshipCodes = json.get("relationship"); assertEquals(1, resourceCodes.size()); assertEquals(201, resourceCodes.get(rpath).asInt()); assertEquals(1, relationshipCodes.size()); assertEquals(201, relationshipCodes.fields().next().getValue().asInt()); // TODO : find out if this returning 404 instead of 204 is a bug or feature //delete(basePath + "/" + environmentId + "/resources/" + bulkResourcePrefix + "-1/metrics/../" // + responseTimeMetricId); delete(basePath + "/" + environmentId +"/resources/" + bulkResourcePrefix +"-1"); } @Test public void testComplexBulkCreate() throws Throwable { String env1 = "bulk-env-" + UUID.randomUUID().toString(); String env2 = "bulk-env-" + UUID.randomUUID().toString(); String rt1 = "bulk-URL" + UUID.randomUUID().toString(); String rt2 = "bulk-URL2" + 
UUID.randomUUID().toString(); String mt1 = "bulk-ResponseTime" + UUID.randomUUID().toString(); String payload = "{"// + " \"/t;"+ tenantId + "\": {"// + " \"environment\": ["// + " {"// + " \"id\": \"" + env1 + "\","// + " \"properties\": {\"key\": \"value\"},"// + " \"outgoing\": {"// + " \"customRel\": [\"/t;"+ tenantId + "\"]"// + " }"// + " },"// + " {"// + " \"id\": \""+ env2 +"\","// + " \"properties\": {\"key\": \"value2\"}"// + " }"// + " ],"// + " \"resourceType\": ["// + " {"// + " \"id\": \"" + rt1 +"\""// + " },"// + " {"// + " \"id\": \""+ rt2 +"\""// + " }"// + " ],"// + " \"metricType\": ["// + " {"// + " \"id\": \""+ mt1 +"\","// + " \"type\": \"GAUGE\","// + " \"unit\": \"MILLISECONDS\","// + " \"collectionInterval\": \"1\""// + " }"// + " ]"// + " },"// + " \"/t;"+ tenantId + "/rt;" + rt1 + "\": {"// + " \"dataEntity\": ["// + " {"// + " \"role\": \"configurationSchema\","// + " \"value\": {"// + " \"title\": \"URL config schema\","// + " \"description\": \"A json schema describing configuration of an URL\","// + " \"type\": \"string\""// + " }"// + " }"// + " ],"// + " \"operationType\": ["// + " {"// + " \"id\": \"ping\""// + " }"// + " ]"// + " },"// + " \"/t;"+ tenantId + "/rt;" + rt2 + "\": {"// + " \"dataEntity\": ["// + " {"// + " \"role\": \"connectionConfigurationSchema\","// + " \"value\": {"// + " \"title\": \"URL2 connection config schema\","// + " \"description\": \"A json schema describing connection to an URL\","// + " \"type\": \"string\""// + " }"// + " }"// + " ],"// + " \"operationType\": ["// + " {"// + " \"id\": \"ping-pong\""// + " }"// + " ]"// + " },"// + " \"/t;"+ tenantId + "/e;" + env1 + "\": {"// + " \"resource\": ["// + " {"// + " \"id\": \"url1\","// + " \"resourceTypePath\": \"/t;"+ tenantId + "/rt;" + rt1 +"\""// + " }"// + " ],"// + " \"metric\": ["// + " {"// + " \"id\": \"url1_responseTime\","// + " \"metricTypePath\": \"/t;"+ tenantId + "/mt;"+ mt1 +"\""// + " }"// + " ]"// + " },"// + " \"/t;"+ tenantId + "/e;" 
+ env1 +"/r;url1\": {"// + " \"dataEntity\": ["// + " {"// + " \"role\": \"configuration\","// + " \"value\": \"http://redhat.com\""// + " }"// + " ],"// + " \"relationship\": ["// + " {"// + " \"name\": \"incorporates\","// + " \"otherEnd\": \"/t;"+ tenantId + "/e;" + env1 +"/m;url1_responseTime\","// + " \"direction\": \"outgoing\""// + " }"// + " ]"// + " }"// + "}"; Response response = post(basePath + "/bulk", payload); assertEquals(201, response.code()); JsonNode json = mapper.readTree(response.body().string()); JsonNode environmentCodes = json.get("environment"); JsonNode resourceTypeCodes = json.get("resourceType"); JsonNode metricTypeCodes = json.get("metricType"); JsonNode dataCodes = json.get("dataEntity"); JsonNode operationTypeCodes = json.get("operationType"); JsonNode resourceCodes = json.get("resource"); JsonNode metricCodes = json.get("metric"); JsonNode relationshipCodes = json.get("relationship"); //now make a second call, this time only create a metadata pack. //this has to be done in two requests, because the resource types need to be fully populated before they can //be put into the pack because afterwards they're frozen payload = "{"// + " \"/t;"+ tenantId + "\": {"// + " \"metadataPack\": ["// + " {"// + " \"members\": [\"/t;"+ tenantId + "/rt;" + rt1 + "\", \"/t;"+ tenantId + "/rt;" + rt2 +"\","// + " \"/t;"+ tenantId + "/mt;"+ mt1 +"\"]"// + " }"// + " ]"// + " }"// + "}"; response = post(basePath + "/bulk", payload); assertEquals(201, response.code()); json = mapper.readTree(response.body().string()); JsonNode metadataPackCodes = json.get("metadataPack"); assertEquals(2, environmentCodes.size()); assertEquals(201, environmentCodes.get("/t;"+ tenantId + "/e;" + env1).asInt()); assertEquals(201, environmentCodes.get("/t;"+ tenantId + "/e;" + env2).asInt()); assertEquals(2, resourceTypeCodes.size()); assertEquals(201, resourceTypeCodes.get("/t;"+ tenantId + "/rt;" + rt1).asInt()); assertEquals(201, resourceTypeCodes.get("/t;"+ tenantId + 
"/rt;" + rt2).asInt()); assertEquals(1, metricTypeCodes.size()); assertEquals(201, metricTypeCodes.get("/t;"+ tenantId + "/mt;" + mt1).asInt()); assertEquals(3, dataCodes.size()); assertEquals(201, dataCodes.get("/t;"+ tenantId + "/rt;" + rt1 +"/d;configurationSchema").asInt()); assertEquals(201, dataCodes.get("/t;"+ tenantId + "/rt;" + rt2 +"/d;connectionConfigurationSchema").asInt()); assertEquals(201, dataCodes.get("/t;"+ tenantId + "/e;" + env1 +"/r;url1/d;configuration").asInt()); assertEquals(2, operationTypeCodes.size()); assertEquals(201, operationTypeCodes.get("/t;"+ tenantId + "/rt;" + rt1 +"/ot;ping").asInt()); assertEquals(201, operationTypeCodes.get("/t;"+ tenantId + "/rt;" + rt2 +"/ot;ping-pong").asInt()); assertEquals(1, resourceCodes.size()); assertEquals(201, resourceCodes.get("/t;"+ tenantId + "/e;" + env1 +"/r;url1").asInt()); assertEquals(1, metricCodes.size()); assertEquals(201, metricCodes.get("/t;"+ tenantId + "/e;" + env1 +"/m;url1_responseTime").asInt()); assertEquals(1, relationshipCodes.size()); assertEquals(201, relationshipCodes.fields().next().getValue().asInt()); assertEquals(1, metadataPackCodes.size()); assertEquals(201, metadataPackCodes.fields().next().getValue().asInt()); response = get(basePath + "/" + env1 +"/resources/url1/metrics"); json = mapper.readTree(response.body().string()); assertEquals("/t;"+ tenantId + "/e;" + env1 +"/m;url1_responseTime", json.get(0).get("path").asText()); String mpPath = metadataPackCodes.fields().next().getKey(); String mpId = mpPath.substring(mpPath.lastIndexOf(";") + 1); delete(basePath + "/metadatapacks/" + mpId); delete(basePath + "/environments/" + env1); delete(basePath + "/environments/" + env2); delete(basePath + "/resourceTypes/" + rt1); delete(basePath + "/metricTypes/" + mt1); } @Test public void testMetadataPacks() throws Throwable { Response response = post(basePath + "/metadatapacks", "{ \"members\": [\"/t;" + tenantId + "/rt;" + urlTypeId + "\"]}"); JsonNode json = 
mapper.readTree(response.body().string()); String mpId = json.get("id").asText(); String url = baseURI + basePath + "/resourceTypes/" + urlTypeId; response = client.newCall(newAuthRequest().url(url).delete().build()).execute(); assertEquals("Deleting a resource type that is part of metadatapack should not be possible.", 400, response.code()); delete(basePath + "/metadatapacks/" + mpId); } @Test public void testRecursiveChildren() throws Throwable { try { Response response = post(basePath + "/" + environmentId +"/resources", "{ \"id\": \"rootResource\", \"resourceTypePath\": \"/" + urlTypeId +"\"}"); assertEquals(201, response.code()); response = post(basePath + "/" + environmentId +"/resources/rootResource", "{\"id\": \"childResource\", \"resourceTypePath\": \"/" + urlTypeId +"\"}" ); assertEquals(201, response.code()); response = post(basePath + "/" + environmentId +"/resources/rootResource/childResource", "{\"id\": \"grandChildResource\", \"resourceTypePath\": \"/" + urlTypeId +"\"}"); assertEquals(201, response.code()); response = post(basePath + "/" + environmentId +"/resources/rootResource/childResource", "{\"id\": \"grandChildResource2\", \"resourceTypePath\": \"/" + roomRTypeId + "\"}"); assertEquals(201, response.code()); response = get(basePath + "/" + environmentId +"/resources/rootResource/recursiveChildren", "typeId", urlTypeId); JsonNode ret = mapper.readTree(response.body().string()); assertEquals(2, ret.size()); Assert.assertTrue(toStream(ret).anyMatch(node -> "childResource".equals(node.get("id").asText()))); Assert.assertTrue(toStream(ret).anyMatch(node -> "grandChildResource".equals(node.get("id").asText()))); response = get(basePath + "/" + environmentId +"/resources/rootResource/recursiveChildren", "typeId", roomRTypeId); ret = mapper.readTree(response.body().string()); assertEquals(1, ret.size()); Assert.assertTrue(toStream(ret).anyMatch(node -> "grandChildResource2".equals(node.get("id").asText()))); } finally { delete(basePath + "/" + 
environmentId +"/resources/rootResource"); } } @Test public void testSync() throws Throwable { String structure = "{"// + "\"type\": \"feed\","// + "\"data\": {"// + " \"id\": \"sync-feed\""// + "},"// + "\"children\": {"// + " \"resource\": ["// + " {"// + " \"data\": {"// + " \"id\": \"resource\","// + " \"resourceTypePath\": \"resourceType\""// + " },"// + " \"children\": {"// + " \"resource\": ["// + " {"// + " \"data\": {"// + " \"id\": \"childResource\","// + " \"resourceTypePath\": \"../resourceType\""// + " }"// + " }"// + " ]"// + " }"// + " }"// + " ],"// + " \"resourceType\": ["// + " {"// + " \"data\": {"// + " \"id\": \"resourceType\","// + " \"name\": \"My Resource Type With A Friendly Name\""// + " }"// + " }"// + " ],"// + " \"metric\": ["// + " {"// + " \"data\": {"// + " \"id\": \"metric\","// + " \"metricTypePath\": \"metricType\","// + " \"collectionInterval\": 0"// + " }"// + " }"// + " ],"// + " \"metricType\": ["// + " {"// + " \"data\": {"// + " \"id\": \"metricType\","// + " \"type\": \"GAUGE\","// + " \"unit\": \"NONE\","// + " \"collectionInterval\": 0,"// + " \"name\": \"My Metric Type With A Friendly Name\""// + " }"// + " }"// + " ]"// + "}"// + "}"; try { Response response = post(basePath + "/feeds", "{\"id\": \"sync-feed\"}"); assertEquals(201, response.code()); response = post(basePath + "/feeds/sync-feed/resourceTypes", "{\"id\": \"doomed\"}"); assertEquals(201, response.code()); //check that the doomed resource type is there response = get(basePath + "/path/f;sync-feed/rt;doomed"); assertEquals(200, response.code()); response = post(basePath + "/sync/f;sync-feed", structure); assertEquals(204, response.code()); //check that stuff is there response = get(basePath + "/path/f;sync-feed"); assertEquals(200, response.code()); response = get(basePath + "/path/f;sync-feed/r;resource"); assertEquals(200, response.code()); response = get(basePath + "/path/f;sync-feed/r;resource/r;childResource"); assertEquals(200, response.code()); 
response = get(basePath + "/path/f;sync-feed/rt;resourceType"); assertEquals(200, response.code()); response = get(basePath + "/path/f;sync-feed/mt;metricType"); assertEquals(200, response.code()); //check that the doomed resource type is gone, because it was not part of the payload from the feed response = get(basePath + "/path/f;sync-feed/rt;doomed"); assertEquals(404, response.code()); } finally { Response response = get(basePath + "/path/f;sync-feed"); if (response.code() == 200) { delete(basePath + "/feeds/sync-feed"); } } } protected static void assertEntityExists(String path, String cp) throws Throwable { assertEntityExists(path, new String[0], cp); } protected static void assertEntityExists(String path, String[] queryParams, String cp) throws Throwable { Response response = get(basePath + "/" + path, queryParams); assertEquals(200, response.code()); JsonNode json = mapper.readTree(response.body().string()); assertEquals(fullCanonicalPath(cp), json.get("path").asText()); } protected static void assertEntitiesExist(String path, String... 
cps) throws Throwable { List<String> expectedPaths = Arrays.stream(cps).map(cp -> fullCanonicalPath(cp)).collect(Collectors.toList()); Response response = get(basePath + "/" + path); JsonNode json = mapper.readTree(response.body().string()); List<String> entityPaths = toStream(json).map(node -> node.get("path").asText()).collect(Collectors.toList()); for (Iterator<String> it = expectedPaths.iterator(); it.hasNext();) { String cp = it.next(); if (entityPaths.remove(cp)) { it.remove(); } } Assert.assertTrue("Unexpected entities with paths: " + entityPaths, entityPaths.isEmpty()); Assert.assertTrue("Following entities not found: " + expectedPaths, expectedPaths.isEmpty()); } protected static Stream<JsonNode> toStream(JsonNode json) { Spliterator<JsonNode> jsonSpliterator = Spliterators.spliteratorUnknownSize(json.elements(), 0); return StreamSupport.stream(jsonSpliterator, false); } protected static void assertRelationshipJsonldExists(String path, String source, String label, String target) throws Throwable { Response response = get(basePath + "/" + path, "jsonld", "true"); JsonNode json = mapper.readTree(response.body().string()); boolean found = toStream(json) .anyMatch(node -> source.equals(node.get("source").get("shortId").asText()) && label.equals(node.get("name").asText()) && target.equals(node.get("target").get("shortId").asText())); Assert.assertTrue("Following edge not found: source: "+ source +", name: "+ label +", target: " + target, found); } protected static void assertRelationshipExists(final String path, final String source, final String label, String target, String... 
query) throws Throwable { Response response = get(basePath + "/" + path, query); List<Relationship> rels = mapper.readValue(response.body().string(), new TypeReference<List<Relationship>>(){}); Assert.assertTrue("Following Relationship not found: " + source +", " + label + ", " + target, rels.stream().anyMatch( r -> source.equals(r.getSource().toString()) && label.equals(r.getName()) && target.equals(r.getTarget().toString()) ) ); } /* Add the deletable path to {@link #pathsToDelete} and send a {@code POST} request using the given map of * arguments. */ protected static Response postDeletable(String path, Entity.Blueprint blueprint) throws Throwable { String getVerificationPath = path + "/" + PathSegmentCodec.encode(blueprint.getId()); return postDeletable(path, blueprint, getVerificationPath); } protected static Response postDeletable(String path, Entity.Blueprint blueprint, String getVerificationPath) throws Throwable { String postPath = basePath + "/" + path; String key = postPath + "/" + PathSegmentCodec.encode(blueprint.getId()); pathsToDelete.put(key, basePath + "/" + getVerificationPath); return postNew(postPath, blueprint); } protected static String fullCanonicalPath(String cp) { return CanonicalPath.fromPartiallyUntypedString(cp, CanonicalPath.of().tenant(tenantId).get(), SegmentType.ANY_ENTITY) .toString(); } }
hawkular-inventory-itest/src/test/java/org/hawkular/inventory/rest/test/InventoryITest.java
/* * Copyright 2015-2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.inventory.rest.test; import static org.hawkular.inventory.api.Relationships.WellKnown.contains; import static org.hawkular.inventory.api.Relationships.WellKnown.defines; import static org.hawkular.inventory.api.Relationships.WellKnown.incorporates; import static org.junit.Assert.assertEquals; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Spliterator; import java.util.Spliterators; import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; import org.hawkular.accounts.api.model.HawkularUser; import org.hawkular.accounts.api.model.Persona; import org.hawkular.inventory.api.model.Entity; import org.hawkular.inventory.api.model.Environment; import org.hawkular.inventory.api.model.Feed; import org.hawkular.inventory.api.model.Metric; import org.hawkular.inventory.api.model.MetricDataType; import org.hawkular.inventory.api.model.MetricType; import org.hawkular.inventory.api.model.MetricUnit; import org.hawkular.inventory.api.model.OperationType; import org.hawkular.inventory.api.model.Relationship; import 
org.hawkular.inventory.api.model.Resource; import org.hawkular.inventory.api.model.ResourceType; import org.hawkular.inventory.paths.CanonicalPath; import org.hawkular.inventory.paths.PathSegmentCodec; import org.hawkular.inventory.paths.SegmentType; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.squareup.okhttp.Response; /** * @author <a href="https://github.com/ppalaga">Peter Palaga</a> * */ public class InventoryITest extends AbstractTestBase { protected static final String basePath = "/hawkular/inventory"; protected static final String urlTypeId = "URL"; protected static final String testEnvId = "test"; protected static final String environmentId = "itest-env-" + UUID.randomUUID().toString(); protected static final String pingableHostRTypeId = "itest-pingable-host-" + UUID.randomUUID().toString(); protected static final String roomRTypeId = "itest-room-type-" + UUID.randomUUID().toString(); protected static final String copyMachineRTypeId = "itest-copy-machine-type-" + UUID.randomUUID().toString(); protected static final String date20150626 = "2015-06-26"; protected static final String date20160801 = "2016-08-01"; protected static final String expectedLifetime15years = "15y"; protected static final String facilitiesDept = "Facilities"; protected static final String itDept = "IT"; protected static final String typeVersion = "1.0"; protected static final String responseTimeMTypeId = "itest-response-time-" + UUID.randomUUID().toString(); protected static final String responseStatusCodeMTypeId = "itest-response-status-code-" + UUID.randomUUID().toString(); protected static final String statusDurationMTypeId = "status.duration.type"; protected static final String statusCodeMTypeId = "status.code.type"; protected 
static final String host1ResourceId = "itest-host1-" + UUID.randomUUID().toString(); protected static final String host2ResourceId = "itest-host2-" + UUID.randomUUID().toString(); protected static final String room1ResourceId = "itest-room1-" + UUID.randomUUID().toString(); protected static final String copyMachine1ResourceId = "itest-copy-machine1-" + UUID.randomUUID().toString(); protected static final String copyMachine2ResourceId = "itest-copy-machine2-" + UUID.randomUUID().toString(); protected static final String responseTimeMetricId = "itest-response-time-" + host1ResourceId; protected static final String responseStatusCodeMetricId = "itest-response-status-code-" + host1ResourceId; protected static final String feedId = "itest-feed-" + UUID.randomUUID().toString(); protected static final String bulkResourcePrefix = "bulk-resource-" + UUID.randomUUID().toString(); protected static final String bulkResourceTypePrefix = "bulk-resource-type-" + UUID.randomUUID().toString(); protected static final String bulkMetricTypePrefix = "bulk-metric-type-" + UUID.randomUUID().toString(); protected static final String customRelationName = "inTheSameRoom"; /* key is the path to delete while value is the path to GET to verify the deletion */ protected static Map<String, String> pathsToDelete = new LinkedHashMap<>(); protected static String tenantId; @BeforeClass public static void setupData() throws Throwable { Persona persona = getWithRetries("/hawkular/accounts/personas/current", HawkularUser.class, 10, 2000); tenantId = persona.getIdAsUUID().toString(); Assert.assertTrue("Cannot get the current tenant id.", tenantId != null && !tenantId.trim().isEmpty()); /* assert the test environment exists */ /* There is a race condition when WildFly agent is enabled: both this test and Agent trigger the autocreation of test entities simultaneously, and one of them may get only a partially initialized state. That is why we do several delayed attempts do perform the first request. 
*/ String path = "/hawkular/inventory/environments/" + testEnvId; Environment env = getWithRetries(path, Environment.class, 10, 2000); assertEquals("Unable to get the '" + testEnvId + "' environment.", testEnvId, env.getId()); /* Create an environment that will be used exclusively by this test */ Response response = postDeletable("environments", Environment.Blueprint.builder().withId(environmentId).build()); assertEquals(201, response.code()); Environment environment = mapper.readValue(response.body().string(), Environment.class); assertEquals(environmentId, environment.getId()); assertEquals(CanonicalPath.of().tenant(tenantId).environment(environmentId).get(), environment.getPath()); assertEquals(baseURI + basePath + "/environments/" + environmentId, response.headers().get("Location")); /* URL resource type should have been autocreated */ path = basePath + "/resourceTypes/" + urlTypeId; ResourceType resourceType = getWithRetries(path, ResourceType.class, 10, 2000); assertEquals("Unable to get the '" + urlTypeId + "' resource type.", urlTypeId, resourceType.getId()); assertEquals(urlTypeId, resourceType.getId()); /* Create pingable host resource type */ response = postDeletable("resourceTypes", ResourceType.Blueprint.builder().withId(pingableHostRTypeId).build()); assertEquals(201, response.code()); ResourceType pingableHost = mapper.readValue(response.body().string(), ResourceType.class); assertEquals(pingableHostRTypeId, pingableHost.getId()); assertEquals(baseURI + basePath +"/resourceTypes/" + pingableHostRTypeId, response.headers().get("Location")); /* Create room resource type */ response = postDeletable("resourceTypes", ResourceType.Blueprint.builder().withId(roomRTypeId) .withProperty("expectedLifetime", expectedLifetime15years)// .withProperty("ownedByDepartment", facilitiesDept).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/resourceTypes/" + roomRTypeId, response.headers().get("Location")); /* Create copy machine 
resource type */ response = postDeletable("resourceTypes", ResourceType.Blueprint.builder().withId(copyMachineRTypeId) .withProperty("expectedLifetime", expectedLifetime15years)// .withProperty("ownedByDepartment", itDept).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/resourceTypes/" + copyMachineRTypeId, response.headers().get("Location")); /* Create a metric type */ response = postDeletable("metricTypes", MetricType.Blueprint.builder(MetricDataType.COUNTER) .withId(responseTimeMTypeId)// .withUnit(MetricUnit.MILLISECONDS)// .withInterval(1L)// .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/metricTypes/" + responseTimeMTypeId, response.headers().get("Location")); /* Create another metric type */ response = postDeletable("metricTypes", MetricType.Blueprint.builder(MetricDataType.GAUGE) .withId(responseStatusCodeMTypeId)// .withUnit(MetricUnit.NONE)// .withInterval(1L)// .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/metricTypes/" + responseStatusCodeMTypeId, response.headers().get("Location")); /* link pingableHostRTypeId with responseTimeMTypeId and responseStatusCodeMTypeId */ path = basePath + "/resourceTypes/" + pingableHostRTypeId +"/metricTypes"; //just testing that both relative and canonical paths work when referencing the types response = post(path, "[\"../" + responseTimeMTypeId +"\", \"/" + responseStatusCodeMTypeId +"\"]"); assertEquals(204, response.code()); //we will try deleting the associations between resource types and metric types, too //this is not necessary because deleting either the resource type or the metric type will take care of it anyway //but this is to test that explicit deletes work, too // XXX this should check for removal of a single association. // OkHttp unconditionally canonicalizes the URL paths, which makes the below constructs impossible to send // over the wire using OkHttp (even though they're perfectly valid URLs). 
//pathsToDelete.put(path + "/../" + responseTimeMTypeId, path +"/../" + responseTimeMTypeId); // XXX again, this is impossible due to OkHttp unconditionally canonicalizing the URL paths //pathsToDelete.put(path + "/../" + responseStatusCodeMTypeId, path +"/../" + responseStatusCodeMTypeId); /* add a metric */ response = postDeletable(environmentId +"/metrics", Metric.Blueprint.builder() .withId(responseTimeMetricId) // .withMetricTypePath("../" + responseTimeMTypeId) // .build()); //path relative to env assertEquals(201, response.code()); Metric responseTimeMetric = mapper.readValue(response.body().string(), Metric.class); assertEquals(responseTimeMetricId, responseTimeMetric.getId()); assertEquals(baseURI + basePath +"/" + environmentId + "/metrics/" + responseTimeMetricId, response.headers().get("Location")); /* add another metric */ response = postDeletable(environmentId +"/metrics", Metric.Blueprint.builder() .withId(responseStatusCodeMetricId) // .withMetricTypePath("/" + responseStatusCodeMTypeId) // .build()); assertEquals(201, response.code()); Metric responseStatusCode = mapper.readValue(response.body().string(), Metric.class); assertEquals(responseStatusCodeMetricId, responseStatusCode.getId()); assertEquals(baseURI + basePath +"/" + environmentId + "/metrics/" + responseStatusCodeMetricId, response.headers().get("Location")); /* add a resource */ response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder() // .withId(host1ResourceId) // .withResourceTypePath("../" + pingableHostRTypeId) // .build()); assertEquals(201, response.code()); Resource host1Resource = mapper.readValue(response.body().string(), Resource.class); assertEquals(host1ResourceId, host1Resource.getId()); assertEquals(CanonicalPath.of().tenant(tenantId).environment(environmentId). 
resource(host1ResourceId).get(), host1Resource.getPath()); assertEquals(CanonicalPath.of().tenant(tenantId).resourceType(pingableHostRTypeId).get(), host1Resource.getType().getPath()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + host1ResourceId, response.headers().get("Location")); /* add another resource */ response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder()// .withId(host2ResourceId)// .withResourceTypePath("../" + pingableHostRTypeId)// .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + host2ResourceId, response.headers().get("Location")); /* add a room resource */ response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder().withId(room1ResourceId).withResourceTypePath("../" + roomRTypeId) .withProperty("purchaseDate", date20150626).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId, response.headers().get("Location")); /* add a copy machine resource */ response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder() // .withId(copyMachine1ResourceId) // .withResourceTypePath("../" + copyMachineRTypeId)// .withProperty("purchaseDate", date20150626)// .withProperty("nextMaintenanceDate", date20160801)// .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + copyMachine1ResourceId, response.headers().get("Location")); response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder() // .withId(copyMachine2ResourceId) // .withResourceTypePath("../" + copyMachineRTypeId) // .withProperty("purchaseDate", date20160801) // .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + copyMachine2ResourceId, response.headers().get("Location")); /* add child resources */ response = 
postDeletable(environmentId + "/resources/" + room1ResourceId, Resource.Blueprint.builder().withId("table").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId +"/table", response.headers().get("Location")); response = postDeletable(environmentId + "/resources/" + room1ResourceId +"/table", Resource.Blueprint.builder().withId("leg/1").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId +"/table/leg%2F1", response.headers().get("Location")); response = postDeletable(environmentId + "/resources/" + room1ResourceId +"/table", Resource.Blueprint.builder().withId("leg 2").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId +"/table/leg%202", response.headers().get("Location")); response = postDeletable(environmentId + "/resources/" + room1ResourceId +"/table", Resource.Blueprint.builder().withId("leg;3").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId +"/table/leg;3", response.headers().get("Location")); response = postDeletable(environmentId + "/resources/" + room1ResourceId +"/table", Resource.Blueprint.builder().withId("leg-4").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId + "/resources/" + room1ResourceId +"/table/leg-4", response.headers().get("Location")); //alternative child hierarchies response = postDeletable(environmentId + "/resources", Resource.Blueprint.builder().withId("weapons").withResourceTypePath("/" + roomRTypeId).build()); assertEquals(201, response.code()); assertEquals(baseURI + 
basePath +"/" + environmentId + "/resources/weapons", response.headers().get("Location")); path = basePath + "/" + environmentId + "/resources/weapons/children"; response = post(path, JsonNodeFactory.instance.arrayNode() // .add("/e;" + environmentId + "/r;" + room1ResourceId + "/r;table/r;leg%2F1") // .add("../" + room1ResourceId + "/table/leg-4") // .toString()); assertEquals(204, response.code()); // XXX again, this is impossible due to OkHttp unconditionally canonicalizing the URL paths // pathsToDelete.put(path + "/../table/leg%2F1", path + "/../table/leg%2F1") // pathsToDelete.put(path + "/../table/leg-4", path + "/../table/leg-4") /* link the metric to resource */ path = basePath + "/" + environmentId + "/resources/" + host1ResourceId +"/metrics"; response = post(path, JsonNodeFactory.instance.arrayNode() // .add("/e;"+ environmentId + "/m;"+ responseTimeMetricId) .add("/e;"+ environmentId + "/m;"+ responseStatusCodeMetricId) .toString()); assertEquals(204, response.code()); // XXX again, this is impossible due to OkHttp unconditionally canonicalizing the URL paths // pathsToDelete.put(path + "/../" + responseTimeMetricId, path + "/../" + responseTimeMetricId); // XXX again, this is impossible due to OkHttp unconditionally canonicalizing the URL paths //pathsToDelete.put(path + "/../" + responseStatusCodeMetricId, path + "/../" + responseStatusCodeMetricId); /* add a feed */ response = postDeletable("feeds", Feed.Blueprint.builder().withId(feedId).build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/feeds/" + feedId, response.headers().get("Location")); /* add a custom relationship, no need to clean up, it'll be deleted together with the resources */ Map<String, Object> properties = new LinkedHashMap<>(); properties.put("from", "2000-01-01"); properties.put("confidence", "90%"); CanonicalPath src = CanonicalPath.fromString("/t;" + tenantId + "/e;" + environmentId + "/r;" + host2ResourceId); CanonicalPath target = 
CanonicalPath.fromString("/t;" + tenantId + "/e;" + environmentId + "/r;" + host1ResourceId); Relationship h1h2Rel = new Relationship("42", customRelationName, src, target, properties); response = postNew(basePath + "/" + environmentId +"/resources/" + host2ResourceId +"/relationships", h1h2Rel); assertEquals(201, response.code()); JsonNode h1h2Json = mapper.readTree(response.body().string()); assertEquals(customRelationName, h1h2Json.get("name").asText()); // relationship with tenant CanonicalPath tenantPath = CanonicalPath.fromString("/t;" + tenantId); Relationship tenantRel = new Relationship(UUID.randomUUID().toString(), "sampleRelationship", tenantPath, tenantPath); post(basePath + "/tenants/relationships", mapper.writeValueAsString(tenantRel)); assertEquals(201, response.code()); // add operation type to the resource type response = postDeletable("resourceTypes/" + pingableHostRTypeId +"/operationTypes", OperationType.Blueprint.builder().withId("start").build()); assertEquals(201, response.code()); response = postDeletable("resourceTypes/" + pingableHostRTypeId +"/operationTypes", OperationType.Blueprint.builder().withId("stop").build()); assertEquals(201, response.code()); // add some parameters to it String startOpParamTypes = "{" // + "\"role\" : \"parameterTypes\"," // + "\"value\": {" // + "\"title\" : \"blah\"," // + "\"type\": \"object\"," // + "\"properties\": { \"quick\": { \"type\": \"boolean\"}}" // + "}" // + "}"; response = post(basePath + "/resourceTypes/" + pingableHostRTypeId +"/operationTypes/start/data", startOpParamTypes); assertEquals(201, response.code()); response = post(basePath + "/resourceTypes/" + pingableHostRTypeId +"/operationTypes/start/data", "{\"role\": \"returnType\", \"value\": {\"title\": \"blah\", \"type\": \"boolean\"}}"); assertEquals(201, response.code()); /* add a resource type json schema */ String schema = "{" + "\"value\": {" // + "\"title\" : \"Character\"," // + "\"type\" : \"object\"," // + "\"properties\": {" // 
+ "\"firstName\" : {\"type\": \"string\"}," // + "\"secondName\": {\"type\": \"string\"}," // + "\"age\" : {" // + "\"type\" : \"integer\"," // + "\"minimum\" : 0," // + "\"exclusiveMinimum\": false" // + "}," // + "\"male\" : {" // + "\"description\": \"true if the character is a male\"," // + "\"type\" : \"boolean\"" // + "}," // + "\"foo\" : {" // + "\"type\" : \"object\"," // + "\"properties\": {" // + "\"something\": {\"type\": \"string\"}," // + "\"someArray\": {" // + "\"type\" : \"array\"," // + "\"minItems\" : 3," // + "\"items\" : {\"type\": \"integer\"}," // + "\"uniqueItems\": false" // + "}," // + "\"foo\" : {" // + "\"type\" : \"object\"," // + "\"properties\": {" // + "\"question\": {" // + "\"type\" : \"string\"," // + "\"pattern\": \"^.*\\\\?$\"" // + "}," // + "\"answer\" : {" // + "\"description\": \"the answer (example of any type)\"" // + "}," // + "\"foo\" : {" // + "\"type\" : \"object\"," // + "\"properties\": {" // + "\"foo\": {" // + "\"type\" : \"object\"," // + "\"properties\": {" // + "\"fear\" : {" // + "\"type\": \"string\"," // + "\"enum\": [\"dentists\", \"lawyers\", \"rats\"]" // + "}" // + "}" // + "}" // + "}" // + "}" // + "}" // + "}" // + "}" // + "}" // + "}," // + "\"required\" : [\"firstName\", \"secondName\", \"male\", \"age\", \"foo\"]" // + "}," // + "\"role\" : \"configurationSchema\"" // + "}"; response = post(basePath + "/resourceTypes/" + pingableHostRTypeId +"/data", schema); assertEquals(201, response.code()); /* add an invalid config data to a resource (invalid ~ not valid against the json schema) */ String invalidData = "{" // + "\"value\": {" // + "\"firstName\": \"John\"," // + "\"secondName\": \"Smith\"" // + "}," // + "\"role\" : \"configuration\"" // + "}"; response = post(basePath + "/" + environmentId +"/resources/" + host2ResourceId +"/data", invalidData); assertEquals(400, response.code()); /* add a config data to a resource, no need to clean up, it'll be deleted together with the resources */ String 
data = "{" // + "\"value\" : {" // + "\"firstName\" : \"Winston\"," // + "\"secondName\": \"Smith\"," // + "\"sdf\" : \"sdf\"," // + "\"male\" : true," // + "\"age\" : 42," // + "\"foo\" : {" // + "\"something\": \"whatever\"," // + "\"someArray\": [1, 1, 2, 3, 5, 8]," // + "\"foo\" : {" // + "\"answer\" : 5," // + "\"question\": \"2+2=?\"," // + "\"foo\" : {" // + "\"foo\": {" // + "\"fear\": \"rats\"" // + "}" // + "}" // + "}" // + "}" // + "}," // + "\"role\" : \"configuration\"," // + "\"properties\": {" // + "\"war\" : \"peace\"," // + "\"freedom\" : \"slavery\"," // + "\"ignorance\": \"strength\"" // + "}" // + "}"; response = post(basePath + "/" + environmentId +"/resources/" + host2ResourceId +"/data", data); assertEquals(201, response.code()); //add resource-owner metric response = postDeletable(environmentId +"/resources/" + host2ResourceId +"/metrics", Metric.Blueprint.builder() // .withId("resource-owned-metric") // .withMetricTypePath("/"+responseTimeMTypeId) // .build()); assertEquals(201, response.code()); assertEquals(baseURI + basePath +"/" + environmentId +"/resources/" + host2ResourceId +"/metrics/resource-owned-metric", response.headers().get("Location")); } @AfterClass public static void deleteEverything() throws IOException { /* the following would delete all data of the present user. We cannot do that as long as we do not have * a dedicated user for running this very single test class. 
*/ // Response response = client.delete(path : basePath + "/tenant") // assertEquals(204, response.code()) /* Let's delete the entities one after another in the reverse order as we created them */ List<Map.Entry<String, String>> entries = new ArrayList<Map.Entry<String, String>>(pathsToDelete.entrySet()); Collections.reverse(entries); for (Map.Entry<String, String> en : entries) { String path = en.getKey(); String getValidationPath = en.getValue(); Response response = client.newCall(newAuthRequest().url(baseURI + path).delete().build()).execute(); assertEquals( "Could not delete path [" + baseURI + path + "]: " + response.body().string(), 204, response.code()); if (getValidationPath != null) { response = client.newCall(newAuthRequest().url(baseURI + path).build()).execute(); assertEquals("The path " + getValidationPath + " should not exist after the entity was deleted: " + response.body().string(), 404, response.code()); } } } @Test public void ping() throws Throwable { Response response = get(basePath + ""); assertEquals(200, response.code()); } @Test public void testEnvironmentsCreated() throws Throwable { assertEntitiesExist("environments", "/e;"+ testEnvId, "/e;"+ environmentId); } @Test public void testResourceTypesCreated() throws Throwable { assertEntityExists("resourceTypes/" + urlTypeId, "/rt;" + urlTypeId); assertEntityExists("resourceTypes/" + pingableHostRTypeId, "/rt;" + pingableHostRTypeId); assertEntityExists("resourceTypes/" + roomRTypeId, "/rt;" + roomRTypeId); // commented out as it interfers with WildFly Agent // assertEntitiesExist("resourceTypes", [urlTypeId, pingableHostRTypeId, roomRTypeId]) } @Test public void testMetricTypesCreated() throws Throwable { assertEntityExists("metricTypes/" + responseTimeMTypeId, "/mt;" + responseTimeMTypeId); assertEntityExists("metricTypes/" + statusDurationMTypeId, "/mt;" + statusDurationMTypeId); assertEntityExists("metricTypes/" + statusCodeMTypeId, "/mt;" + statusCodeMTypeId); // commented out as it 
interfers with WildFly Agent // assertEntitiesExist("metricTypes", // [responseTimeMTypeId, responseStatusCodeMTypeId, statusDurationMTypeId, statusCodeMTypeId]) } @Test public void testOperationTypesCreated() throws Throwable { Response response = get(basePath + "/resourceTypes/" + pingableHostRTypeId +"/operationTypes"); JsonNode json = mapper.readTree(response.body().string()); assertEquals(2, json.size()); assertEntityExists("resourceTypes/" + pingableHostRTypeId +"/operationTypes/start", "/rt;" + pingableHostRTypeId + "/ot;start"); assertEntityExists("resourceTypes/" + pingableHostRTypeId +"/operationTypes/start/data", new String[] {"dataType", "returnType"}, "/rt;" + pingableHostRTypeId + "/ot;start/d;returnType"); assertEntityExists("resourceTypes/" + pingableHostRTypeId + "/operationTypes/start/data", new String[] { "dataType", "parameterTypes" }, "/rt;" + pingableHostRTypeId + "/ot;start/d;parameterTypes"); } @Test public void testMetricTypesLinked() throws Throwable { assertEntitiesExist("resourceTypes/" + pingableHostRTypeId +"/metricTypes", "/mt;" + responseTimeMTypeId, "/mt;" + responseStatusCodeMTypeId); } @Test public void testResourcesCreated() throws Throwable { assertEntityExists(environmentId + "/resources/" + host1ResourceId, "/e;" + environmentId + "/r;" + host1ResourceId); assertEntityExists(environmentId + "/resources/" + host2ResourceId, "/e;" + environmentId + "/r;" + host2ResourceId); assertEntityExists(environmentId + "/resources/" + room1ResourceId, "/e;" + environmentId + "/r;" + room1ResourceId); } @Test public void testResourcesFilters() throws Throwable { /* filter by resource properties */ Response response = get(basePath + "/" + environmentId +"/resources", "properties", "purchaseDate:" + date20150626, "sort", "id"); JsonNode json = mapper.readTree(response.body().string()); assertEquals(2, json.size()); assertEquals(copyMachine1ResourceId, json.get(0).get("id").asText()); assertEquals(room1ResourceId, 
                json.get(1).get("id").asText());
        response = get(basePath + "/" + environmentId +"/resources", "properties",
                "nextMaintenanceDate:"+ date20160801);
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(copyMachine1ResourceId, json.get(0).get("id").asText());

        /* query by two props at once */
        response = get(basePath + "/" + environmentId +"/resources", "properties",
                "nextMaintenanceDate:" +date20160801 +",purchaseDate:" +date20150626);
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(copyMachine1ResourceId, json.get(0).get("id").asText());

        /* query by property existence */
        response = get(basePath + "/" + environmentId +"/resources", "properties", "purchaseDate", "sort", "id");
        json = mapper.readTree(response.body().string());
        assertEquals(3, json.size());
        assertEquals(copyMachine1ResourceId, json.get(0).get("id").asText());
        assertEquals(copyMachine2ResourceId, json.get(1).get("id").asText());
        assertEquals(room1ResourceId, json.get(2).get("id").asText());

        /* filter by type */
        response = get(basePath + "/" + environmentId +"/resources", "type.id", pingableHostRTypeId);
        json = mapper.readTree(response.body().string());
        assertEquals(2, json.size());
        response = get(basePath + "/" + environmentId +"/resources", "type.id", roomRTypeId,
                "type.version", typeVersion);
        json = mapper.readTree(response.body().string());
        assertEquals(2, json.size());
    }

    /** Environment-level metrics and the resource-owned metric created in setup are listed. */
    @Test
    public void testMetricsCreated() throws Throwable {
        assertEntityExists(environmentId +"/metrics/" + responseTimeMetricId,
                "/e;"+ environmentId + "/m;"+ responseTimeMetricId);
        assertEntityExists(environmentId +"/metrics/" + responseStatusCodeMetricId,
                "/e;"+ environmentId + "/m;"+ responseStatusCodeMetricId);
        assertEntitiesExist(environmentId +"/metrics",
                "/e;"+ environmentId + "/m;"+ responseTimeMetricId,
                "/e;"+ environmentId + "/m;"+ responseStatusCodeMetricId,
                "/e;"+ environmentId + "/r;"+ host2ResourceId + "/m;resource-owned-metric");
    }

    /** host1 was associated with both metrics in setup; the metrics sub-collection reflects that. */
    @Test
    public void testMetricsLinked() throws Throwable {
        assertEntitiesExist(environmentId +"/resources/" + host1ResourceId +"/metrics",
                "/e;" + environmentId + "/m;" + responseTimeMetricId,
                "/e;" + environmentId + "/m;" + responseStatusCodeMetricId);
    }

    /** The configuration data entity attached to host2 in setup exists. */
    @Test
    public void testConfigCreated() throws Throwable {
        assertEntityExists(environmentId +"/resources/" + host2ResourceId +"/data",
                "/e;" + environmentId + "/r;" + host2ResourceId + "/d;configuration");
        // assertEntitiesExist(environmentId +"/resources/"
        // + host2ResourceId%2Ftable/data?dataType=connectionConfiguration",
        // ["/e;" + environmentId + "/r;" + host2ResourceId + "/d;connectionConfiguration"])
    }

    /**
     * Pagination: fetch both pingable hosts as one page, then one per page in ascending and
     * descending order, and verify each single-element page matches the corresponding element
     * of the two-element baseline.
     */
    @Test
    public void testPaging() throws Throwable {
        String path = basePath + "/" + environmentId +"/resources";
        Response response = get(path, "type.id", pingableHostRTypeId, "page", "0", "per_page", "2", "sort", "id");
        JsonNode json = mapper.readTree(response.body().string());
        assertEquals(2, json.size());
        JsonNode first = json.get(0);
        JsonNode second = json.get(1);
        response = get(path, "type.id", pingableHostRTypeId, "page", "0", "per_page", "1", "sort", "id");
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(first, json.get(0));
        response = get(path, "type.id", pingableHostRTypeId, "page", "1", "per_page", "1", "sort", "id");
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(second, json.get(0));
        response = get(path, "type.id", pingableHostRTypeId, "page", "0", "per_page", "1", "sort", "id",
                "order", "desc");
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(second, json.get(0));
        response = get(path, "type.id", pingableHostRTypeId, "page", "1", "per_page", "1", "sort", "id",
                "order", "desc");
        json = mapper.readTree(response.body().string());
        assertEquals(1, json.size());
        assertEquals(first, json.get(0));
    }

    /** The tenant has a "contains" relationship to the environment created in setup. */
    @Test
    public void testTenantsContainEnvironments() throws Throwable {
        assertRelationshipExists("tenant/relationships", "/t;"+ tenantId + "", contains.name(),
                "/t;"+ tenantId + "/e;"+ environmentId + "");
        assertRelationshipJsonldExists("tenant/relationships", tenantId, contains.name(), environmentId);
    }

    /** The tenant "contains" its resource types, checked from both endpoints' relationship views. */
    @Test
    public void testTenantsContainResourceTypes() throws Throwable {
        assertRelationshipExists("resourceTypes/" + urlTypeId +"/relationships", "/t;"+ tenantId + "",
                contains.name(), "/t;"+ tenantId + "/rt;"+ urlTypeId + "");
        assertRelationshipExists("tenant/relationships", "/t;"+ tenantId + "", contains.name(),
                "/t;"+ tenantId + "/rt;" +pingableHostRTypeId);
    }

    /** The tenant "contains" its metric types. */
    @Test
    public void testTenantsContainMetricTypes() throws Throwable {
        assertRelationshipExists("metricTypes/" + responseTimeMTypeId +"/relationships", "/t;"+ tenantId + "",
                contains.name(), "/t;"+ tenantId + "/mt;" + responseTimeMTypeId);
        assertRelationshipExists("tenant/relationships", "/t;"+ tenantId + "", contains.name(),
                "/t;"+ tenantId + "/mt;" + statusCodeMTypeId);
    }

    /** The environment "contains" both host resources. */
    @Test
    public void testEnvironmentsContainResources() throws Throwable {
        assertRelationshipExists("environments/" + environmentId +"/relationships",
                "/t;"+ tenantId + "/e;"+ environmentId + "", contains.name(),
                "/t;"+ tenantId + "/e;"+ environmentId + "/r;"+ host2ResourceId + "");
        assertRelationshipExists("environments/" + environmentId +"/relationships",
                "/t;"+ tenantId + "/e;"+ environmentId + "", contains.name(),
                "/t;"+ tenantId + "/e;"+ environmentId + "/r;"+ host1ResourceId + "");
        assertRelationshipJsonldExists("environments/" + environmentId +"/relationships", environmentId,
                contains.name(), host1ResourceId);
        assertRelationshipJsonldExists("environments/" + environmentId +"/relationships", environmentId,
                contains.name(), host2ResourceId);
    }

    /** The tenant "contains" the feed created in setup. */
    @Test
    public void testTenantsContainFeeds() throws Throwable {
        assertRelationshipExists("feeds/" + feedId +"/relationships", "/t;"+ tenantId + "", contains.name(),
                "/t;"+ tenantId + "/f;"+ feedId + "");
        assertRelationshipJsonldExists("feeds/" + feedId +"/relationships",
                tenantId, contains.name(), feedId);
    }

    /** The environment "contains" both metrics created in setup. */
    @Test
    public void testEnvironmentsContainMetrics() throws Throwable {
        assertRelationshipExists("environments/" + environmentId +"/relationships",
                "/t;"+ tenantId + "/e;"+ environmentId + "", contains.name(),
                "/t;"+ tenantId + "/e;"+ environmentId + "/m;"+ responseTimeMetricId + "");
        assertRelationshipExists("environments/" + environmentId +"/relationships",
                "/t;"+ tenantId + "/e;"+ environmentId + "", contains.name(),
                "/t;"+ tenantId + "/e;"+ environmentId + "/m;"+ responseStatusCodeMetricId + "");
        assertRelationshipJsonldExists("environments/" + environmentId +"/relationships", environmentId,
                contains.name(), responseTimeMetricId);
        assertRelationshipJsonldExists("environments/" + environmentId +"/relationships", environmentId,
                contains.name(), responseStatusCodeMetricId);
    }

    /** The "incorporates" links from the resource type to its metric types, from both endpoints. */
    @Test
    public void testResourceTypesIncorporatesMetricTypes() throws Throwable {
        assertRelationshipExists("resourceTypes/" + pingableHostRTypeId +"/relationships",
                "/t;"+ tenantId + "/rt;" + pingableHostRTypeId, incorporates.name(),
                "/t;"+ tenantId + "/mt;" + responseTimeMTypeId);
        assertRelationshipExists("metricTypes/" + responseStatusCodeMTypeId +"/relationships",
                "/t;"+ tenantId + "/rt;" + pingableHostRTypeId, incorporates.name(),
                "/t;"+ tenantId + "/mt;" + responseStatusCodeMTypeId);
        assertRelationshipJsonldExists("resourceTypes/" + pingableHostRTypeId +"/relationships",
                pingableHostRTypeId, incorporates.name(), responseTimeMTypeId);
    }

    /** host1 "incorporates" both metrics it was linked to in setup. */
    @Test
    public void testResourcesIncorporatesMetrics() throws Throwable {
        assertRelationshipExists(environmentId +"/resources/" + host1ResourceId +"/relationships",
                "/t;"+ tenantId + "/e;"+ environmentId + "/r;"+ host1ResourceId + "", incorporates.name(),
                "/t;"+ tenantId + "/e;"+ environmentId + "/m;"+ responseStatusCodeMetricId + "");
        assertRelationshipExists(environmentId +"/resources/" + host1ResourceId +"/relationships",
                "/t;"+ tenantId + "/e;"+ environmentId + "/r;"+ host1ResourceId + "", incorporates.name(),
                "/t;"+ tenantId + "/e;"+ environmentId + "/m;"+ responseTimeMetricId + "");
        assertRelationshipJsonldExists(environmentId +"/resources/" + host1ResourceId +"/relationships",
                host1ResourceId, incorporates.name(), responseTimeMetricId);
    }

    /** The resource type "defines" the resources instantiated from it. */
    @Test
    public void testResourceTypesDefinesResources() throws Throwable {
        assertRelationshipExists("resourceTypes/" + pingableHostRTypeId +"/relationships",
                "/t;"+ tenantId + "/rt;" + pingableHostRTypeId, defines.name(),
                "/t;"+ tenantId + "/e;"+ environmentId + "/r;"+ host2ResourceId + "");
    }

    /** The metric types "define" the metrics instantiated from them. */
    @Test
    public void testMetricTypesDefinesMetrics() throws Throwable {
        assertRelationshipJsonldExists("metricTypes/" + responseStatusCodeMTypeId +"/relationships",
                responseStatusCodeMTypeId, defines.name(), responseStatusCodeMetricId);
        assertRelationshipJsonldExists("metricTypes/" + responseTimeMTypeId +"/relationships",
                responseTimeMTypeId, defines.name(), responseTimeMetricId);
    }

    /** The custom relationship created in setup between host2 and host1 is visible. */
    @Test
    public void testCustomRelationship() throws Throwable {
        assertRelationshipJsonldExists(environmentId +"/resources/" + host2ResourceId +"/relationships",
                host2ResourceId, customRelationName, host1ResourceId);
    }

    /** Relationship listing supports filtering by property value and by relationship name. */
    @Test
    public void testRelationshipFiltering() throws Throwable {
        assertRelationshipExists(environmentId +"/resources/" + host2ResourceId +"/relationships",
                "/t;"+ tenantId + "/e;"+ environmentId + "/r;"+ host2ResourceId + "", customRelationName,
                "/t;"+ tenantId + "/e;"+ environmentId + "/r;"+ host1ResourceId + "",
                "property", "from", "propertyValue", "2000-01-01");
        assertRelationshipExists(environmentId +"/resources/" + host2ResourceId +"/relationships",
                "/t;"+ tenantId + "/e;"+ environmentId + "/r;"+ host2ResourceId + "", customRelationName,
                "/t;"+ tenantId + "/e;"+ environmentId + "/r;"+ host1ResourceId + "",
                "property", "confidence", "propertyValue", "90%");
        assertRelationshipExists(environmentId +"/resources/" + host2ResourceId +"/relationships",
                "/t;"+ tenantId + "/e;"+ environmentId + "/r;"+ host2ResourceId + "", customRelationName,
                "/t;"+ tenantId + "/e;"+
environmentId + "/r;"+ host1ResourceId + "", "named", customRelationName); } @Test public void testResourceHierarchyQuerying() throws Throwable { assertEntitiesExist(environmentId +"/resources/" + room1ResourceId +"/children", "/e;"+ environmentId + "/r;"+ room1ResourceId + "/r;table"); String base = "/e;"+ environmentId + "/r;"+ room1ResourceId + "/r;table"; assertEntitiesExist(environmentId +"/resources/" + room1ResourceId +"/table/children", base + "/r;leg%2F1", base + "/r;leg%202", base + "/r;leg;3", base + "/r;leg-4"); assertEntitiesExist(environmentId +"/resources/weapons/children", "/e;"+ environmentId + "/r;"+ room1ResourceId + "/r;table/r;leg%2F1", "/e;"+ environmentId + "/r;"+ room1ResourceId + "/r;table/r;leg-4"); } @Test @Ignore public void testResourceBulkCreate() throws Throwable { StringBuilder payload = new StringBuilder("{\"/e;test\": {\"resource\": ["); for (int i = 0; i < 100; i++) { payload.append("{ \"id\": \"" + bulkResourcePrefix + "-" + i + "\", \"resourceTypePath\": \"/rt;" + roomRTypeId + "\"}"); if (i != 0) { payload.append(","); } } payload.append("]}}"); Response response = post(basePath + "/bulk", payload.toString()); assertEquals(201, response.code()); JsonNode json = mapper.readTree(response.body().string()); assertEquals(100, json.size()); for (Iterator<Entry<String, JsonNode>> it = json.fields(); it.hasNext(); ) { Entry<String, JsonNode> en = it.next(); CanonicalPath p = CanonicalPath.fromString(en.getKey()); String env = p.ids().getEnvironmentId(); String rid = p.ids().getResourcePath().getSegment().getElementId(); delete(basePath + "/" + env +"/resources/" + rid); } } @Test public void testResourceBulkCreateUnderFeedWithDuplicates() throws Throwable { String pathToResType = "/t;"+ tenantId + "/f;"+ feedId + "/rt;"+ bulkResourceTypePrefix + ".1"; String payload = "{" + " \"/t;"+ tenantId + "/f;"+ feedId + "\": {" // + " \"resourceType\": [" // + " {" // + " \"id\": \""+ bulkResourceTypePrefix +".1\"" // + " }," // + " {" // + " 
\"id\": \""+ bulkResourceTypePrefix +".1\"" // + " }" // + " ]," // + " \"resource\": [" // + " {" // + " \"id\" : \""+ bulkResourcePrefix + ".1\"," // + " \"resourceTypePath\": \""+ pathToResType + "\"" // + " }," // + " {" // + " \"id\" : \""+ bulkResourcePrefix + ".2\"," // + " \"resourceTypePath\": \""+ pathToResType + "\"" // + " }" // + " ]," // + " \"metricType\": [" // + " {" // + " \"id\" : \""+ bulkMetricTypePrefix + ".1\"," // + " \"unit\" : \"BYTES\"," // + " \"type\" : \"GAUGE\"," // + " \"collectionInterval\": \"300\"" // + " }," // + " {" // + " \"id\" : \""+ bulkMetricTypePrefix + ".2\"," // + " \"unit\" : \"BYTES\"," // + " \"type\" : \"GAUGE\"," // + " \"collectionInterval\": \"300\"" // + " }" // + " ]" // + " }," // + " \"" + pathToResType +"\": {" // + " \"relationship\": [" // + " {" // + " \"name\" : \"incorporates\"," // + " \"otherEnd\" : \"/t;"+ tenantId + "/f;"+ feedId + "/mt;"+ bulkMetricTypePrefix + ".1\"," // + " \"direction\": \"outgoing\"" // + " }," // + " {" // + " \"name\" : \"incorporates\"," // + " \"otherEnd\" : \"/t;"+ tenantId + "/f;"+ feedId + "/mt;"+ bulkMetricTypePrefix + ".1\"," // + " \"direction\": \"outgoing\"" // + " }," // + " {" // + " \"name\" : \"incorporates\"," // + " \"otherEnd\" : \"/t;"+ tenantId + "/f;"+ feedId + "/mt;"+ bulkMetricTypePrefix + ".2\"," // + " \"direction\": \"outgoing\"" // + " }" // + " ]" // + " }" // + "}"; Response response = post(basePath + "/bulk", payload); assertEquals(201, response.code()); JsonNode json = mapper.readTree(response.body().string()); JsonNode resourceCodes = json.get("resource"); JsonNode metricTypeCodes = json.get("metricType"); JsonNode resourceTypeCodes = json.get("resourceType"); JsonNode relationshipCodes = json.get("relationship"); // check, there are no dupes assertEquals(2, resourceCodes.size()); assertEquals(2, metricTypeCodes.size()); assertEquals(1, resourceTypeCodes.size()); assertEquals(2, relationshipCodes.size()); // check, no 409 was raised, because 
only the first status code is taken assertEquals(201, resourceCodes.get("/t;" + tenantId + "/f;" + feedId + "/r;" + bulkResourcePrefix + ".1") .asInt()); assertEquals(201, resourceCodes.get("/t;" + tenantId + "/f;" + feedId + "/r;" + bulkResourcePrefix + ".2") .asInt()); assertEquals(201, resourceTypeCodes.get(pathToResType).asInt()); assertEquals(201, metricTypeCodes.get("/t;" + tenantId + "/f;" + feedId + "/mt;" + bulkMetricTypePrefix + ".1").asInt()); assertEquals(201, metricTypeCodes.get("/t;" + tenantId + "/f;" + feedId + "/mt;" + bulkMetricTypePrefix + ".2").asInt()); assertEquals(201, relationshipCodes.get("/rl;" + PathSegmentCodec.encode(pathToResType + "-(incorporates)->" + "/t;"+ tenantId + "/f;"+ feedId + "/mt;"+ bulkMetricTypePrefix + "" + ".1")).asInt()); assertEquals(201, relationshipCodes.get("/rl;" + PathSegmentCodec.encode(pathToResType + "-(incorporates)->" + "/t;"+ tenantId + "/f;"+ feedId + "/mt;"+ bulkMetricTypePrefix + "" + ".2")).asInt()); delete(basePath + "/feeds/" + feedId +"/resources/" + bulkResourcePrefix + ".1"); delete(basePath + "/feeds/" + feedId +"/metricTypes/" + bulkMetricTypePrefix + ".1"); delete(basePath + "/feeds/" + feedId +"/metricTypes/" + bulkMetricTypePrefix + ".2"); // client.delete(path: basePath + "/feeds/" + feedId +"/resourceTypes/" + bulkResourceTypePrefix" + ".1"); } @Test public void testResourceBulkCreateWithErrors() throws Throwable { StringBuilder payload = new StringBuilder("{\"/e;" + environmentId + "\": {\"resource\": ["); //this should fail payload.append("{\"id\": \"" + room1ResourceId + "\", \"resourceTypePath\": \"/rt;" + roomRTypeId + "\"},"); //this should succeed payload.append("{\"id\": \"" + bulkResourcePrefix + "-1\", \"resourceTypePath\": \"/rt;" + roomRTypeId + "\"}"); payload.append("]}}"); Response response = post(basePath + "/bulk", payload.toString()); assertEquals(201, response.code()); JsonNode json = mapper.readTree(response.body().string()); JsonNode codes = json.get("resource"); 
assertEquals(2, codes.size()); assertEquals(409, codes.get("/t;" + tenantId + "/e;" + environmentId + "/r;" + room1ResourceId).asInt()); assertEquals(201, codes.get("/t;" + tenantId + "/e;" + environmentId + "/r;" + bulkResourcePrefix + "-1").asInt()); delete(basePath + "/" + environmentId +"/resources/" + bulkResourcePrefix + "-1"); } @Test public void testBulkCreateAndRelate() throws Throwable { String epath = "/t;"+ tenantId + "/e;"+ environmentId; String rpath = epath +"/r;" + bulkResourcePrefix + "-1"; String mpath = epath +"/m;"+ responseTimeMetricId + ""; String payload = "{" // + "\"" + epath + "\": {" // + "\"resource\": [" // + "{" // + "\"id\": \"" + bulkResourcePrefix + "-1\"," // + "\"resourceTypePath\": \"/rt;" + roomRTypeId + "\"" // + "}" // + "]" // + "}," + "\"" + rpath + "\": {" // + "\"relationship\" : [" // + "{" // + "\"name\": \"incorporates\"," // + "\"otherEnd\": \"" + mpath + "\"," // + "\"direction\": \"outgoing\"" // + "}" // + "]" // + "}" // + "}"; Response response = post(basePath + "/bulk", payload); assertEquals(201, response.code()); JsonNode json = mapper.readTree(response.body().string()); JsonNode resourceCodes = json.get("resource"); JsonNode relationshipCodes = json.get("relationship"); assertEquals(1, resourceCodes.size()); assertEquals(201, resourceCodes.get(rpath).asInt()); assertEquals(1, relationshipCodes.size()); assertEquals(201, relationshipCodes.fields().next().getValue().asInt()); // TODO : find out if this returning 404 instead of 204 is a bug or feature //delete(basePath + "/" + environmentId + "/resources/" + bulkResourcePrefix + "-1/metrics/../" // + responseTimeMetricId); delete(basePath + "/" + environmentId +"/resources/" + bulkResourcePrefix +"-1"); } @Test public void testComplexBulkCreate() throws Throwable { String env1 = "bulk-env-" + UUID.randomUUID().toString(); String env2 = "bulk-env-" + UUID.randomUUID().toString(); String rt1 = "bulk-URL" + UUID.randomUUID().toString(); String rt2 = "bulk-URL2" + 
UUID.randomUUID().toString(); String mt1 = "bulk-ResponseTime" + UUID.randomUUID().toString(); String payload = "{"// + " \"/t;"+ tenantId + "\": {"// + " \"environment\": ["// + " {"// + " \"id\": \"" + env1 + "\","// + " \"properties\": {\"key\": \"value\"},"// + " \"outgoing\": {"// + " \"customRel\": [\"/t;"+ tenantId + "\"]"// + " }"// + " },"// + " {"// + " \"id\": \""+ env2 +"\","// + " \"properties\": {\"key\": \"value2\"}"// + " }"// + " ],"// + " \"resourceType\": ["// + " {"// + " \"id\": \"" + rt1 +"\""// + " },"// + " {"// + " \"id\": \""+ rt2 +"\""// + " }"// + " ],"// + " \"metricType\": ["// + " {"// + " \"id\": \""+ mt1 +"\","// + " \"type\": \"GAUGE\","// + " \"unit\": \"MILLISECONDS\","// + " \"collectionInterval\": \"1\""// + " }"// + " ]"// + " },"// + " \"/t;"+ tenantId + "/rt;" + rt1 + "\": {"// + " \"dataEntity\": ["// + " {"// + " \"role\": \"configurationSchema\","// + " \"value\": {"// + " \"title\": \"URL config schema\","// + " \"description\": \"A json schema describing configuration of an URL\","// + " \"type\": \"string\""// + " }"// + " }"// + " ],"// + " \"operationType\": ["// + " {"// + " \"id\": \"ping\""// + " }"// + " ]"// + " },"// + " \"/t;"+ tenantId + "/rt;" + rt2 + "\": {"// + " \"dataEntity\": ["// + " {"// + " \"role\": \"connectionConfigurationSchema\","// + " \"value\": {"// + " \"title\": \"URL2 connection config schema\","// + " \"description\": \"A json schema describing connection to an URL\","// + " \"type\": \"string\""// + " }"// + " }"// + " ],"// + " \"operationType\": ["// + " {"// + " \"id\": \"ping-pong\""// + " }"// + " ]"// + " },"// + " \"/t;"+ tenantId + "/e;" + env1 + "\": {"// + " \"resource\": ["// + " {"// + " \"id\": \"url1\","// + " \"resourceTypePath\": \"/t;"+ tenantId + "/rt;" + rt1 +"\""// + " }"// + " ],"// + " \"metric\": ["// + " {"// + " \"id\": \"url1_responseTime\","// + " \"metricTypePath\": \"/t;"+ tenantId + "/mt;"+ mt1 +"\""// + " }"// + " ]"// + " },"// + " \"/t;"+ tenantId + "/e;" 
+ env1 +"/r;url1\": {"// + " \"dataEntity\": ["// + " {"// + " \"role\": \"configuration\","// + " \"value\": \"http://redhat.com\""// + " }"// + " ],"// + " \"relationship\": ["// + " {"// + " \"name\": \"incorporates\","// + " \"otherEnd\": \"/t;"+ tenantId + "/e;" + env1 +"/m;url1_responseTime\","// + " \"direction\": \"outgoing\""// + " }"// + " ]"// + " }"// + "}"; Response response = post(basePath + "/bulk", payload); assertEquals(201, response.code()); JsonNode json = mapper.readTree(response.body().string()); JsonNode environmentCodes = json.get("environment"); JsonNode resourceTypeCodes = json.get("resourceType"); JsonNode metricTypeCodes = json.get("metricType"); JsonNode dataCodes = json.get("dataEntity"); JsonNode operationTypeCodes = json.get("operationType"); JsonNode resourceCodes = json.get("resource"); JsonNode metricCodes = json.get("metric"); JsonNode relationshipCodes = json.get("relationship"); //now make a second call, this time only create a metadata pack. //this has to be done in two requests, because the resource types need to be fully populated before they can //be put into the pack because afterwards they're frozen payload = "{"// + " \"/t;"+ tenantId + "\": {"// + " \"metadataPack\": ["// + " {"// + " \"members\": [\"/t;"+ tenantId + "/rt;" + rt1 + "\", \"/t;"+ tenantId + "/rt;" + rt2 +"\","// + " \"/t;"+ tenantId + "/mt;"+ mt1 +"\"]"// + " }"// + " ]"// + " }"// + "}"; response = post(basePath + "/bulk", payload); assertEquals(201, response.code()); json = mapper.readTree(response.body().string()); JsonNode metadataPackCodes = json.get("metadataPack"); assertEquals(2, environmentCodes.size()); assertEquals(201, environmentCodes.get("/t;"+ tenantId + "/e;" + env1).asInt()); assertEquals(201, environmentCodes.get("/t;"+ tenantId + "/e;" + env2).asInt()); assertEquals(2, resourceTypeCodes.size()); assertEquals(201, resourceTypeCodes.get("/t;"+ tenantId + "/rt;" + rt1).asInt()); assertEquals(201, resourceTypeCodes.get("/t;"+ tenantId + 
"/rt;" + rt2).asInt()); assertEquals(1, metricTypeCodes.size()); assertEquals(201, metricTypeCodes.get("/t;"+ tenantId + "/mt;" + mt1).asInt()); assertEquals(3, dataCodes.size()); assertEquals(201, dataCodes.get("/t;"+ tenantId + "/rt;" + rt1 +"/d;configurationSchema").asInt()); assertEquals(201, dataCodes.get("/t;"+ tenantId + "/rt;" + rt2 +"/d;connectionConfigurationSchema").asInt()); assertEquals(201, dataCodes.get("/t;"+ tenantId + "/e;" + env1 +"/r;url1/d;configuration").asInt()); assertEquals(2, operationTypeCodes.size()); assertEquals(201, operationTypeCodes.get("/t;"+ tenantId + "/rt;" + rt1 +"/ot;ping").asInt()); assertEquals(201, operationTypeCodes.get("/t;"+ tenantId + "/rt;" + rt2 +"/ot;ping-pong").asInt()); assertEquals(1, resourceCodes.size()); assertEquals(201, resourceCodes.get("/t;"+ tenantId + "/e;" + env1 +"/r;url1").asInt()); assertEquals(1, metricCodes.size()); assertEquals(201, metricCodes.get("/t;"+ tenantId + "/e;" + env1 +"/m;url1_responseTime").asInt()); assertEquals(1, relationshipCodes.size()); assertEquals(201, relationshipCodes.fields().next().getValue().asInt()); assertEquals(1, metadataPackCodes.size()); assertEquals(201, metadataPackCodes.fields().next().getValue().asInt()); response = get(basePath + "/" + env1 +"/resources/url1/metrics"); json = mapper.readTree(response.body().string()); assertEquals("/t;"+ tenantId + "/e;" + env1 +"/m;url1_responseTime", json.get(0).get("path").asText()); String mpPath = metadataPackCodes.fields().next().getKey(); String mpId = mpPath.substring(mpPath.lastIndexOf(";") + 1); delete(basePath + "/metadatapacks/" + mpId); delete(basePath + "/environments/" + env1); delete(basePath + "/environments/" + env2); delete(basePath + "/resourceTypes/" + rt1); delete(basePath + "/metricTypes/" + mt1); } @Test public void testMetadataPacks() throws Throwable { Response response = post(basePath + "/metadatapacks", "{ \"members\": [\"/t;" + tenantId + "/rt;" + urlTypeId + "\"]}"); JsonNode json = 
mapper.readTree(response.body().string()); String mpId = json.get("id").asText(); String url = baseURI + basePath + "/resourceTypes/" + urlTypeId; response = client.newCall(newAuthRequest().url(url).delete().build()).execute(); assertEquals("Deleting a resource type that is part of metadatapack should not be possible.", 400, response.code()); delete(basePath + "/metadatapacks/" + mpId); } @Test public void testRecursiveChildren() throws Throwable { try { Response response = post(basePath + "/" + environmentId +"/resources", "{ \"id\": \"rootResource\", \"resourceTypePath\": \"/" + urlTypeId +"\"}"); assertEquals(201, response.code()); response = post(basePath + "/" + environmentId +"/resources/rootResource", "{\"id\": \"childResource\", \"resourceTypePath\": \"/" + urlTypeId +"\"}" ); assertEquals(201, response.code()); response = post(basePath + "/" + environmentId +"/resources/rootResource/childResource", "{\"id\": \"grandChildResource\", \"resourceTypePath\": \"/" + urlTypeId +"\"}"); assertEquals(201, response.code()); response = post(basePath + "/" + environmentId +"/resources/rootResource/childResource", "{\"id\": \"grandChildResource2\", \"resourceTypePath\": \"/" + roomRTypeId + "\"}"); assertEquals(201, response.code()); response = get(basePath + "/" + environmentId +"/resources/rootResource/recursiveChildren", "typeId", urlTypeId); JsonNode ret = mapper.readTree(response.body().string()); assertEquals(2, ret.size()); Assert.assertTrue(toStream(ret).anyMatch(node -> "childResource".equals(node.get("id").asText()))); Assert.assertTrue(toStream(ret).anyMatch(node -> "grandChildResource".equals(node.get("id").asText()))); response = get(basePath + "/" + environmentId +"/resources/rootResource/recursiveChildren", "typeId", roomRTypeId); ret = mapper.readTree(response.body().string()); assertEquals(1, ret.size()); Assert.assertTrue(toStream(ret).anyMatch(node -> "grandChildResource2".equals(node.get("id").asText()))); } finally { delete(basePath + "/" + 
environmentId +"/resources/rootResource"); } } @Test public void testSync() throws Throwable { String structure = "{"// + "\"type\": \"feed\","// + "\"data\": {"// + " \"id\": \"sync-feed\""// + "},"// + "\"children\": {"// + " \"resource\": ["// + " {"// + " \"data\": {"// + " \"id\": \"resource\","// + " \"resourceTypePath\": \"resourceType\""// + " },"// + " \"children\": {"// + " \"resource\": ["// + " {"// + " \"data\": {"// + " \"id\": \"childResource\","// + " \"resourceTypePath\": \"../resourceType\""// + " }"// + " }"// + " ]"// + " }"// + " }"// + " ],"// + " \"resourceType\": ["// + " {"// + " \"data\": {"// + " \"id\": \"resourceType\","// + " \"name\": \"My Resource Type With A Friendly Name\""// + " }"// + " }"// + " ],"// + " \"metric\": ["// + " {"// + " \"data\": {"// + " \"id\": \"metric\","// + " \"metricTypePath\": \"metricType\","// + " \"collectionInterval\": 0"// + " }"// + " }"// + " ],"// + " \"metricType\": ["// + " {"// + " \"data\": {"// + " \"id\": \"metricType\","// + " \"type\": \"GAUGE\","// + " \"unit\": \"NONE\","// + " \"collectionInterval\": 0,"// + " \"name\": \"My Metric Type With A Friendly Name\""// + " }"// + " }"// + " ]"// + "}"// + "}"; try { Response response = post(basePath + "/feeds", "{\"id\": \"sync-feed\"}"); assertEquals(201, response.code()); response = post(basePath + "/sync/f;sync-feed", structure); assertEquals(204, response.code()); //check that stuff is there response = get(basePath + "/path/f;sync-feed"); assertEquals(200, response.code()); response = get(basePath + "/path/f;sync-feed/r;resource"); assertEquals(200, response.code()); response = get(basePath + "/path/f;sync-feed/r;resource/r;childResource"); assertEquals(200, response.code()); response = get(basePath + "/path/f;sync-feed/rt;resourceType"); assertEquals(200, response.code()); response = get(basePath + "/path/f;sync-feed/mt;metricType"); assertEquals(200, response.code()); } finally { Response response = get(basePath + "/path/f;sync-feed"); if 
(response.code() == 200) { delete(basePath + "/feeds/sync-feed"); } } } protected static void assertEntityExists(String path, String cp) throws Throwable { assertEntityExists(path, new String[0], cp); } protected static void assertEntityExists(String path, String[] queryParams, String cp) throws Throwable { Response response = get(basePath + "/" + path, queryParams); assertEquals(200, response.code()); JsonNode json = mapper.readTree(response.body().string()); assertEquals(fullCanonicalPath(cp), json.get("path").asText()); } protected static void assertEntitiesExist(String path, String... cps) throws Throwable { List<String> expectedPaths = Arrays.stream(cps).map(cp -> fullCanonicalPath(cp)).collect(Collectors.toList()); Response response = get(basePath + "/" + path); JsonNode json = mapper.readTree(response.body().string()); List<String> entityPaths = toStream(json).map(node -> node.get("path").asText()).collect(Collectors.toList()); for (Iterator<String> it = expectedPaths.iterator(); it.hasNext();) { String cp = it.next(); if (entityPaths.remove(cp)) { it.remove(); } } Assert.assertTrue("Unexpected entities with paths: " + entityPaths, entityPaths.isEmpty()); Assert.assertTrue("Following entities not found: " + expectedPaths, expectedPaths.isEmpty()); } protected static Stream<JsonNode> toStream(JsonNode json) { Spliterator<JsonNode> jsonSpliterator = Spliterators.spliteratorUnknownSize(json.elements(), 0); return StreamSupport.stream(jsonSpliterator, false); } protected static void assertRelationshipJsonldExists(String path, String source, String label, String target) throws Throwable { Response response = get(basePath + "/" + path, "jsonld", "true"); JsonNode json = mapper.readTree(response.body().string()); boolean found = toStream(json) .anyMatch(node -> source.equals(node.get("source").get("shortId").asText()) && label.equals(node.get("name").asText()) && target.equals(node.get("target").get("shortId").asText())); Assert.assertTrue("Following edge not 
found: source: "+ source +", name: "+ label +", target: " + target, found); } protected static void assertRelationshipExists(final String path, final String source, final String label, String target, String... query) throws Throwable { Response response = get(basePath + "/" + path, query); List<Relationship> rels = mapper.readValue(response.body().string(), new TypeReference<List<Relationship>>(){}); Assert.assertTrue("Following Relationship not found: " + source +", " + label + ", " + target, rels.stream().anyMatch( r -> source.equals(r.getSource().toString()) && label.equals(r.getName()) && target.equals(r.getTarget().toString()) ) ); } /* Add the deletable path to {@link #pathsToDelete} and send a {@code POST} request using the given map of * arguments. */ protected static Response postDeletable(String path, Entity.Blueprint blueprint) throws Throwable { String getVerificationPath = path + "/" + PathSegmentCodec.encode(blueprint.getId()); return postDeletable(path, blueprint, getVerificationPath); } protected static Response postDeletable(String path, Entity.Blueprint blueprint, String getVerificationPath) throws Throwable { String postPath = basePath + "/" + path; String key = postPath + "/" + PathSegmentCodec.encode(blueprint.getId()); pathsToDelete.put(key, basePath + "/" + getVerificationPath); return postNew(postPath, blueprint); } protected static String fullCanonicalPath(String cp) { return CanonicalPath.fromPartiallyUntypedString(cp, CanonicalPath.of().tenant(tenantId).get(), SegmentType.ANY_ENTITY) .toString(); } }
Improve sync itest.
hawkular-inventory-itest/src/test/java/org/hawkular/inventory/rest/test/InventoryITest.java
Improve sync itest.
<ide><path>awkular-inventory-itest/src/test/java/org/hawkular/inventory/rest/test/InventoryITest.java <ide> Response response = post(basePath + "/feeds", "{\"id\": \"sync-feed\"}"); <ide> assertEquals(201, response.code()); <ide> <add> response = post(basePath + "/feeds/sync-feed/resourceTypes", "{\"id\": \"doomed\"}"); <add> assertEquals(201, response.code()); <add> <add> //check that the doomed resource type is there <add> response = get(basePath + "/path/f;sync-feed/rt;doomed"); <add> assertEquals(200, response.code()); <add> <ide> response = post(basePath + "/sync/f;sync-feed", structure); <ide> <ide> assertEquals(204, response.code()); <ide> assertEquals(200, response.code()); <ide> response = get(basePath + "/path/f;sync-feed/mt;metricType"); <ide> assertEquals(200, response.code()); <add> <add> //check that the doomed resource type is gone, because it was not part of the payload from the feed <add> response = get(basePath + "/path/f;sync-feed/rt;doomed"); <add> assertEquals(404, response.code()); <ide> } finally { <ide> Response response = get(basePath + "/path/f;sync-feed"); <ide> if (response.code() == 200) {
Java
bsd-3-clause
29bae51ccd77bd73cfb402242faf1976a54ed752
0
NCIP/cananolab,NCIP/cananolab,NCIP/cananolab
package gov.nih.nci.cananolab.ui.core; import gov.nih.nci.cananolab.domain.common.File; import gov.nih.nci.cananolab.domain.particle.Sample; import gov.nih.nci.cananolab.dto.common.FileBean; import gov.nih.nci.cananolab.dto.common.PointOfContactBean; import gov.nih.nci.cananolab.dto.common.UserBean; import gov.nih.nci.cananolab.dto.particle.SampleBean; import gov.nih.nci.cananolab.exception.FileException; import gov.nih.nci.cananolab.exception.InvalidSessionException; import gov.nih.nci.cananolab.exception.NoAccessException; import gov.nih.nci.cananolab.exception.SecurityException; import gov.nih.nci.cananolab.service.common.FileService; import gov.nih.nci.cananolab.service.common.impl.FileServiceLocalImpl; import gov.nih.nci.cananolab.service.sample.SampleService; import gov.nih.nci.cananolab.service.sample.impl.SampleServiceLocalImpl; import gov.nih.nci.cananolab.service.security.AuthorizationService; import gov.nih.nci.cananolab.ui.sample.InitSampleSetup; import gov.nih.nci.cananolab.ui.security.InitSecuritySetup; import gov.nih.nci.cananolab.util.ClassUtils; import gov.nih.nci.cananolab.util.Constants; import gov.nih.nci.cananolab.util.DataLinkBean; import gov.nih.nci.cananolab.util.PropertyReader; import java.io.FileInputStream; import java.net.URL; import java.util.List; import java.util.Map; import java.util.SortedSet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.apache.struts.action.ActionMessages; import org.apache.struts.validator.DynaValidatorForm; /** * Base action for all annotation actions * * @author pansu * */ public abstract class BaseAnnotationAction extends AbstractDispatchAction { /** * setupSample() will retrieve a SampleBean based on the sampleId which is in 
request/form. * And then check user's access privilege, throws Exception if user doesn't have privilege. * Otherwise, set visibility of Primary POC of sample based on user's privilege. * Lastly, set the SampleBean in request object. * * @param theForm * @param request * @param location * @return SampleBean * @throws Exception if user in session is not allowed to access this sample particle. */ public SampleBean setupSample(DynaValidatorForm theForm, HttpServletRequest request, String location) throws Exception { String sampleId = request.getParameter("sampleId"); if (sampleId == null) { sampleId = (String) request.getAttribute("sampleId"); if (sampleId == null) { sampleId = theForm.getString("sampleId"); } } HttpSession session = request.getSession(); UserBean user = (UserBean) session.getAttribute("user"); SampleService service = null; if (location.equals("local")) { service = new SampleServiceLocalImpl(); } else { String serviceUrl = InitSetup.getInstance().getGridServiceUrl( request, location); //TODO model change //service = new SampleServiceRemoteImpl(serviceUrl); } SampleBean sampleBean = service.findSampleById(sampleId); if (location.equals("local")) { // check access privilege AuthorizationService auth = new AuthorizationService(Constants.CSM_APP_NAME); boolean access = auth.isUserAllowed(sampleBean .getDomain().getName(), user); if (!access) { if (user != null) { request.getSession().removeAttribute("user"); } throw new NoAccessException( "You don't have the required privileges to access this particle"); } else { PointOfContactBean pointOfContactBean = sampleBean.getPocBean(); if (auth.isUserAllowed(pointOfContactBean.getDomain().getId().toString(), user)) { pointOfContactBean.setHidden(false); } else { pointOfContactBean.setHidden(true); } } } sampleBean.setLocation(location); request.setAttribute("theSample", sampleBean); return sampleBean; } protected void saveFilesToFileSystem(List<FileBean> files) throws Exception { // save file data to file system 
and set visibility AuthorizationService authService = new AuthorizationService( Constants.CSM_APP_NAME); FileService fileService = new FileServiceLocalImpl(); for (FileBean fileBean : files) { fileService.writeFile(fileBean.getDomainFile(), fileBean .getNewFileData()); fileService.assignVisibility(fileBean); } } public boolean loginRequired() { return false; } public boolean canUserExecute(UserBean user) throws SecurityException { return InitSecuritySetup.getInstance().userHasCreatePrivilege(user, Constants.CSM_PG_PARTICLE); } public Map<String, SortedSet<DataLinkBean>> setupDataTree( SampleBean sampleBean, HttpServletRequest request) throws Exception { request.setAttribute("updateDataTree", "true"); return InitSampleSetup.getInstance().getDataTree(sampleBean, request); } public ActionForward setupDeleteAll(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { String submitType = request.getParameter("submitType"); DynaValidatorForm theForm = (DynaValidatorForm) form; SampleBean sampleBean = setupSample(theForm, request, "local"); Map<String, SortedSet<DataLinkBean>> dataTree = setupDataTree( sampleBean, request); SortedSet<DataLinkBean> dataToDelete = dataTree.get(submitType); request.getSession().setAttribute("actionName", dataToDelete.first().getDataLink()); request.getSession().setAttribute("dataToDelete", dataToDelete); return mapping.findForward("annotationDeleteView"); } // check for cases where delete can't happen protected boolean checkDelete(HttpServletRequest request, ActionMessages msgs, String id) throws Exception { return true; } public ActionForward deleteAll(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; String submitType = request.getParameter("submitType"); String className = InitSetup.getInstance().getClassName(submitType, 
request.getSession().getServletContext()); String fullClassName = ClassUtils.getFullClass(className) .getCanonicalName(); String[] dataIds = (String[]) theForm.get("idsToDelete"); SampleService sampleService = new SampleServiceLocalImpl(); ActionMessages msgs = new ActionMessages(); for (String id : dataIds) { if (!checkDelete(request, msgs, id)) { return mapping.findForward("annotationDeleteView"); } sampleService.deleteAnnotationById(fullClassName, new Long(id)); } SampleBean sampleBean = setupSample(theForm, request, "local"); setupDataTree(sampleBean, request); ActionMessage msg = new ActionMessage("message.deleteAnnotations", submitType); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); saveMessages(request, msgs); return mapping.findForward("success"); } /** * Download action to handle file downloading and viewing * * @param * @return */ public ActionForward download(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { String fileId = request.getParameter("fileId"); UserBean user = (UserBean) request.getSession().getAttribute("user"); String location = request.getParameter("location"); FileService fileService = null; String remoteServerHostUrl = ""; FileBean fileBean = null; String serviceUrl = null; if (location.equals("local")) { fileService = new FileServiceLocalImpl(); } // CQL2HQL filters out subclasses, disabled the filter else { serviceUrl = InitSetup.getInstance().getGridServiceUrl(request, location); //TODO model change //fileService = new FileServiceRemoteImpl(serviceUrl); } fileBean = fileService.findFileById(fileId, user); if (fileBean != null) { if (fileBean.getDomainFile().getUriExternal()) { response.sendRedirect(fileBean.getDomainFile().getUri()); return null; } } if (!location.equals("local")) { // assume grid service is located on the same server and port as // webapp URL localURL = new URL(request.getRequestURL().toString()); String actionPath = localURL.getPath(); URL remoteUrl = 
new URL(serviceUrl); remoteServerHostUrl = remoteUrl.getProtocol() + "://" + remoteUrl.getHost() + ":" + remoteUrl.getPort(); String remoteDownloadUrl = remoteServerHostUrl + actionPath + "?dispatch=download" + "&fileId=" + fileId + "&location=local"; // remote URL response.sendRedirect(remoteDownloadUrl); return null; } String fileRoot = PropertyReader.getProperty( Constants.FILEUPLOAD_PROPERTY, "fileRepositoryDir"); java.io.File dFile = new java.io.File(fileRoot + java.io.File.separator + fileBean.getDomainFile().getUri()); if (dFile.exists()) { response.setContentType("application/octet-stream"); response.setHeader("Content-disposition", "attachment;filename=\"" + fileBean.getDomainFile().getName() + "\""); response.setHeader("cache-control", "Private"); java.io.InputStream in = new FileInputStream(dFile); java.io.OutputStream out = response.getOutputStream(); byte[] bytes = new byte[32768]; int numRead = 0; while ((numRead = in.read(bytes)) > 0) { out.write(bytes, 0, numRead); } out.close(); } else { ActionMessages msgs = new ActionMessages(); ActionMessage msg = new ActionMessage("error.noFile"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); throw new FileException("File " + fileBean.getDomainFile().getUri() + " doesn't exist on the server"); } return null; } protected Sample[] prepareCopy(HttpServletRequest request, DynaValidatorForm theForm) throws Exception { String[] otherSamples = (String[]) theForm.get("otherSamples"); if (otherSamples.length == 0) { return null; } Sample[] samples = new Sample[otherSamples.length]; SampleService sampleService = new SampleServiceLocalImpl(); int i = 0; for (String other : otherSamples) { Sample sample = sampleService .findSampleByName(other); samples[i] = sample; i++; } return samples; } protected boolean validateFileBean(HttpServletRequest request, ActionMessages msgs, FileBean fileBean) { boolean noErrors = true; if (fileBean == null) { return noErrors; } File File = 
fileBean.getDomainFile(); if (File.getTitle().length() == 0) { ActionMessage msg = new ActionMessage("errors.required", "file title"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); noErrors = false; } if (File.getType().length() == 0) { ActionMessage msg = new ActionMessage("errors.required", "file type"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); noErrors = false; } if (File.getUriExternal()) { if (fileBean.getExternalUrl() == null || fileBean.getExternalUrl().trim().length() == 0) { ActionMessage msg = new ActionMessage("errors.required", "external url"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); noErrors = false; } } else { // all empty if ((fileBean.getUploadedFile() == null || fileBean .getUploadedFile().toString().trim().length() == 0) && (fileBean.getExternalUrl() == null || fileBean .getExternalUrl().trim().length() == 0) && (fileBean.getDomainFile() == null || fileBean .getDomainFile().getName() == null)) { ActionMessage msg = new ActionMessage("errors.required", "uploaded file"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); noErrors = false; // the case that user switch from url to upload file, but no // file is selected } else if ((fileBean.getUploadedFile() == null || fileBean .getUploadedFile().getFileName().length() == 0) && fileBean.getExternalUrl() != null && fileBean.getExternalUrl().trim().length() > 0) { ActionMessage msg = new ActionMessage("errors.required", "uploaded file"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); noErrors = false; } } return noErrors; } public void checkVisibility(HttpServletRequest request, String location, UserBean user, FileBean fileBean) throws Exception { if (location.equals("local")) { FileService fileService = new FileServiceLocalImpl(); fileService.retrieveVisibility(fileBean, user); if (fileBean.isHidden()) { if (user != null) { 
request.getSession().removeAttribute("user"); throw new NoAccessException(); } else { throw new InvalidSessionException(); } } } } }
src/gov/nih/nci/cananolab/ui/core/BaseAnnotationAction.java
package gov.nih.nci.cananolab.ui.core; import gov.nih.nci.cananolab.domain.common.File; import gov.nih.nci.cananolab.domain.particle.Sample; import gov.nih.nci.cananolab.dto.common.FileBean; import gov.nih.nci.cananolab.dto.common.PointOfContactBean; import gov.nih.nci.cananolab.dto.common.UserBean; import gov.nih.nci.cananolab.dto.particle.SampleBean; import gov.nih.nci.cananolab.exception.FileException; import gov.nih.nci.cananolab.exception.InvalidSessionException; import gov.nih.nci.cananolab.exception.NoAccessException; import gov.nih.nci.cananolab.exception.SecurityException; import gov.nih.nci.cananolab.service.common.FileService; import gov.nih.nci.cananolab.service.common.impl.FileServiceLocalImpl; import gov.nih.nci.cananolab.service.sample.SampleService; import gov.nih.nci.cananolab.service.sample.impl.SampleServiceLocalImpl; import gov.nih.nci.cananolab.service.security.AuthorizationService; import gov.nih.nci.cananolab.ui.sample.InitSampleSetup; import gov.nih.nci.cananolab.ui.security.InitSecuritySetup; import gov.nih.nci.cananolab.util.ClassUtils; import gov.nih.nci.cananolab.util.Constants; import gov.nih.nci.cananolab.util.DataLinkBean; import gov.nih.nci.cananolab.util.PropertyReader; import java.io.FileInputStream; import java.net.URL; import java.util.List; import java.util.Map; import java.util.SortedSet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.apache.struts.action.ActionMessages; import org.apache.struts.validator.DynaValidatorForm; /** * Base action for all annotation actions * * @author pansu * */ public abstract class BaseAnnotationAction extends AbstractDispatchAction { public SampleBean setupSample(DynaValidatorForm theForm, HttpServletRequest request, 
String location) throws Exception { String sampleId = request.getParameter("sampleId"); if (sampleId == null) { sampleId = (String) request.getAttribute("sampleId"); if (sampleId == null) { sampleId = theForm.getString("sampleId"); } } HttpSession session = request.getSession(); UserBean user = (UserBean) session.getAttribute("user"); SampleService service = null; if (location.equals("local")) { service = new SampleServiceLocalImpl(); } else { String serviceUrl = InitSetup.getInstance().getGridServiceUrl( request, location); //TODO model change //service = new SampleServiceRemoteImpl(serviceUrl); } SampleBean sampleBean = service .findSampleById(sampleId); if (location.equals("local")) { // check access privilege AuthorizationService auth = new AuthorizationService( Constants.CSM_APP_NAME); boolean access = auth.isUserAllowed(sampleBean .getDomain().getName(), user); if (!access) { if (user != null) { request.getSession().removeAttribute("user"); } throw new NoAccessException( "You don't have the required privileges to access this particle"); } else { PointOfContactBean pointOfContactBean = sampleBean .getPocBean(); if (auth.isUserAllowed(pointOfContactBean.getDomain().getId() .toString(), user)) { pointOfContactBean.setHidden(false); } else { pointOfContactBean.setHidden(true); } } } sampleBean.setLocation(location); request.setAttribute("theSample", sampleBean); return sampleBean; } protected void saveFilesToFileSystem(List<FileBean> files) throws Exception { // save file data to file system and set visibility AuthorizationService authService = new AuthorizationService( Constants.CSM_APP_NAME); FileService fileService = new FileServiceLocalImpl(); for (FileBean fileBean : files) { fileService.writeFile(fileBean.getDomainFile(), fileBean .getNewFileData()); fileService.assignVisibility(fileBean); } } public boolean loginRequired() { return false; } public boolean canUserExecute(UserBean user) throws SecurityException { return 
InitSecuritySetup.getInstance().userHasCreatePrivilege(user, Constants.CSM_PG_PARTICLE); } public Map<String, SortedSet<DataLinkBean>> setupDataTree( SampleBean sampleBean, HttpServletRequest request) throws Exception { request.setAttribute("updateDataTree", "true"); return InitSampleSetup.getInstance().getDataTree(sampleBean, request); } public ActionForward setupDeleteAll(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { String submitType = request.getParameter("submitType"); DynaValidatorForm theForm = (DynaValidatorForm) form; SampleBean sampleBean = setupSample(theForm, request, "local"); Map<String, SortedSet<DataLinkBean>> dataTree = setupDataTree( sampleBean, request); SortedSet<DataLinkBean> dataToDelete = dataTree.get(submitType); request.getSession().setAttribute("actionName", dataToDelete.first().getDataLink()); request.getSession().setAttribute("dataToDelete", dataToDelete); return mapping.findForward("annotationDeleteView"); } // check for cases where delete can't happen protected boolean checkDelete(HttpServletRequest request, ActionMessages msgs, String id) throws Exception { return true; } public ActionForward deleteAll(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaValidatorForm theForm = (DynaValidatorForm) form; String submitType = request.getParameter("submitType"); String className = InitSetup.getInstance().getClassName(submitType, request.getSession().getServletContext()); String fullClassName = ClassUtils.getFullClass(className) .getCanonicalName(); String[] dataIds = (String[]) theForm.get("idsToDelete"); SampleService sampleService = new SampleServiceLocalImpl(); ActionMessages msgs = new ActionMessages(); for (String id : dataIds) { if (!checkDelete(request, msgs, id)) { return mapping.findForward("annotationDeleteView"); } sampleService.deleteAnnotationById(fullClassName, new Long(id)); } SampleBean 
sampleBean = setupSample(theForm, request, "local"); setupDataTree(sampleBean, request); ActionMessage msg = new ActionMessage("message.deleteAnnotations", submitType); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); saveMessages(request, msgs); return mapping.findForward("success"); } /** * Download action to handle file downloading and viewing * * @param * @return */ public ActionForward download(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { String fileId = request.getParameter("fileId"); UserBean user = (UserBean) request.getSession().getAttribute("user"); String location = request.getParameter("location"); FileService fileService = null; String remoteServerHostUrl = ""; FileBean fileBean = null; String serviceUrl = null; if (location.equals("local")) { fileService = new FileServiceLocalImpl(); } // CQL2HQL filters out subclasses, disabled the filter else { serviceUrl = InitSetup.getInstance().getGridServiceUrl(request, location); //TODO model change //fileService = new FileServiceRemoteImpl(serviceUrl); } fileBean = fileService.findFileById(fileId, user); if (fileBean != null) { if (fileBean.getDomainFile().getUriExternal()) { response.sendRedirect(fileBean.getDomainFile().getUri()); return null; } } if (!location.equals("local")) { // assume grid service is located on the same server and port as // webapp URL localURL = new URL(request.getRequestURL().toString()); String actionPath = localURL.getPath(); URL remoteUrl = new URL(serviceUrl); remoteServerHostUrl = remoteUrl.getProtocol() + "://" + remoteUrl.getHost() + ":" + remoteUrl.getPort(); String remoteDownloadUrl = remoteServerHostUrl + actionPath + "?dispatch=download" + "&fileId=" + fileId + "&location=local"; // remote URL response.sendRedirect(remoteDownloadUrl); return null; } String fileRoot = PropertyReader.getProperty( Constants.FILEUPLOAD_PROPERTY, "fileRepositoryDir"); java.io.File dFile = new java.io.File(fileRoot + 
java.io.File.separator + fileBean.getDomainFile().getUri()); if (dFile.exists()) { response.setContentType("application/octet-stream"); response.setHeader("Content-disposition", "attachment;filename=\"" + fileBean.getDomainFile().getName() + "\""); response.setHeader("cache-control", "Private"); java.io.InputStream in = new FileInputStream(dFile); java.io.OutputStream out = response.getOutputStream(); byte[] bytes = new byte[32768]; int numRead = 0; while ((numRead = in.read(bytes)) > 0) { out.write(bytes, 0, numRead); } out.close(); } else { ActionMessages msgs = new ActionMessages(); ActionMessage msg = new ActionMessage("error.noFile"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); throw new FileException("File " + fileBean.getDomainFile().getUri() + " doesn't exist on the server"); } return null; } protected Sample[] prepareCopy(HttpServletRequest request, DynaValidatorForm theForm) throws Exception { String[] otherSamples = (String[]) theForm.get("otherSamples"); if (otherSamples.length == 0) { return null; } Sample[] samples = new Sample[otherSamples.length]; SampleService sampleService = new SampleServiceLocalImpl(); int i = 0; for (String other : otherSamples) { Sample sample = sampleService .findSampleByName(other); samples[i] = sample; i++; } return samples; } protected boolean validateFileBean(HttpServletRequest request, ActionMessages msgs, FileBean fileBean) { boolean noErrors = true; if (fileBean == null) { return noErrors; } File File = fileBean.getDomainFile(); if (File.getTitle().length() == 0) { ActionMessage msg = new ActionMessage("errors.required", "file title"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); noErrors = false; } if (File.getType().length() == 0) { ActionMessage msg = new ActionMessage("errors.required", "file type"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); noErrors = false; } if (File.getUriExternal()) { if 
(fileBean.getExternalUrl() == null || fileBean.getExternalUrl().trim().length() == 0) { ActionMessage msg = new ActionMessage("errors.required", "external url"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); noErrors = false; } } else { // all empty if ((fileBean.getUploadedFile() == null || fileBean .getUploadedFile().toString().trim().length() == 0) && (fileBean.getExternalUrl() == null || fileBean .getExternalUrl().trim().length() == 0) && (fileBean.getDomainFile() == null || fileBean .getDomainFile().getName() == null)) { ActionMessage msg = new ActionMessage("errors.required", "uploaded file"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); noErrors = false; // the case that user switch from url to upload file, but no // file is selected } else if ((fileBean.getUploadedFile() == null || fileBean .getUploadedFile().getFileName().length() == 0) && fileBean.getExternalUrl() != null && fileBean.getExternalUrl().trim().length() > 0) { ActionMessage msg = new ActionMessage("errors.required", "uploaded file"); msgs.add(ActionMessages.GLOBAL_MESSAGE, msg); this.saveErrors(request, msgs); noErrors = false; } } return noErrors; } public void checkVisibility(HttpServletRequest request, String location, UserBean user, FileBean fileBean) throws Exception { if (location.equals("local")) { FileService fileService = new FileServiceLocalImpl(); fileService.retrieveVisibility(fileBean, user); if (fileBean.isHidden()) { if (user != null) { request.getSession().removeAttribute("user"); throw new NoAccessException(); } else { throw new InvalidSessionException(); } } } } }
Just adding comments for function "setupSample()". SVN-Revision: 15029
src/gov/nih/nci/cananolab/ui/core/BaseAnnotationAction.java
Just adding comments for function "setupSample()".
<ide><path>rc/gov/nih/nci/cananolab/ui/core/BaseAnnotationAction.java <ide> */ <ide> public abstract class BaseAnnotationAction extends AbstractDispatchAction { <ide> <add> /** <add> * setupSample() will retrieve a SampleBean based on the sampleId which is in request/form. <add> * And then check user's access privilege, throws Exception if user doesn't have privilege. <add> * Otherwise, set visibility of Primary POC of sample based on user's privilege. <add> * Lastly, set the SampleBean in request object. <add> * <add> * @param theForm <add> * @param request <add> * @param location <add> * @return SampleBean <add> * @throws Exception if user in session is not allowed to access this sample particle. <add> */ <ide> public SampleBean setupSample(DynaValidatorForm theForm, <ide> HttpServletRequest request, String location) throws Exception { <ide> String sampleId = request.getParameter("sampleId"); <ide> //TODO model change <ide> //service = new SampleServiceRemoteImpl(serviceUrl); <ide> } <del> SampleBean sampleBean = service <del> .findSampleById(sampleId); <add> SampleBean sampleBean = service.findSampleById(sampleId); <ide> if (location.equals("local")) { <ide> // check access privilege <del> AuthorizationService auth = new AuthorizationService( <del> Constants.CSM_APP_NAME); <add> AuthorizationService auth = new AuthorizationService(Constants.CSM_APP_NAME); <ide> boolean access = auth.isUserAllowed(sampleBean <ide> .getDomain().getName(), user); <ide> if (!access) { <ide> throw new NoAccessException( <ide> "You don't have the required privileges to access this particle"); <ide> } else { <del> PointOfContactBean pointOfContactBean = sampleBean <del> .getPocBean(); <del> if (auth.isUserAllowed(pointOfContactBean.getDomain().getId() <del> .toString(), user)) { <add> PointOfContactBean pointOfContactBean = sampleBean.getPocBean(); <add> if (auth.isUserAllowed(pointOfContactBean.getDomain().getId().toString(), user)) { <ide> pointOfContactBean.setHidden(false); <ide> } 
else { <ide> pointOfContactBean.setHidden(true);
Java
apache-2.0
68b15f286b71d6a060ad3f8b8c0e4ac835153873
0
arrawatia/netty-http
/** * Copyright 2012-2014 Continuuity, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.continuuity.http; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.util.concurrent.AbstractIdleService; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.jboss.netty.bootstrap.ServerBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelEvent; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.channel.ChannelUpstreamHandler; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.group.ChannelGroup; import org.jboss.netty.channel.group.DefaultChannelGroup; import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; import org.jboss.netty.handler.codec.http.HttpContentCompressor; import org.jboss.netty.handler.codec.http.HttpRequestDecoder; import org.jboss.netty.handler.codec.http.HttpResponseEncoder; import org.jboss.netty.handler.execution.ExecutionHandler; import org.jboss.netty.handler.execution.OrderedMemoryAwareThreadPoolExecutor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetSocketAddress; import java.util.Map; import 
java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; /** * Webservice implemented using the netty framework. Implements Guava's Service interface to manage the states * of the webservice. */ public final class NettyHttpService extends AbstractIdleService { private static final Logger LOG = LoggerFactory.getLogger(NettyHttpService.class); private static final int CLOSE_CHANNEL_TIMEOUT = 5; private final int bossThreadPoolSize; private final int workerThreadPoolSize; private final int execThreadPoolSize; private final long execThreadKeepAliveSecs; private final Map<String, Object> channelConfigs; private final RejectedExecutionHandler rejectedExecutionHandler; private final HandlerContext handlerContext; private final ChannelGroup channelGroup; private final HttpResourceHandler resourceHandler; private final Function<ChannelPipeline, ChannelPipeline> pipelineModifier; private ServerBootstrap bootstrap; private InetSocketAddress bindAddress; private int httpChunkLimit; /** * Initialize NettyHttpService. * @param bindAddress Address for the service to bind to. * @param bossThreadPoolSize Size of the boss thread pool. * @param workerThreadPoolSize Size of the worker thread pool. * @param execThreadPoolSize Size of the thread pool for the executor. * @param execThreadKeepAliveSecs maximum time that excess idle threads will wait for new tasks before terminating. * @param channelConfigs Configurations for the server socket channel. * @param rejectedExecutionHandler rejection policy for executor. * @param urlRewriter URLRewriter to rewrite incoming URLs. * @param httpHandlers HttpHandlers to handle the calls. * @param handlerHooks Hooks to be called before/after request processing by httpHandlers. 
*/ public NettyHttpService(InetSocketAddress bindAddress, int bossThreadPoolSize, int workerThreadPoolSize, int execThreadPoolSize, long execThreadKeepAliveSecs, Map<String, Object> channelConfigs, RejectedExecutionHandler rejectedExecutionHandler, URLRewriter urlRewriter, Iterable<? extends HttpHandler> httpHandlers, Iterable<? extends HandlerHook> handlerHooks, int httpChunkLimit) { this.bindAddress = bindAddress; this.bossThreadPoolSize = bossThreadPoolSize; this.workerThreadPoolSize = workerThreadPoolSize; this.execThreadPoolSize = execThreadPoolSize; this.execThreadKeepAliveSecs = execThreadKeepAliveSecs; this.channelConfigs = ImmutableMap.copyOf(channelConfigs); this.rejectedExecutionHandler = rejectedExecutionHandler; this.channelGroup = new DefaultChannelGroup(); this.resourceHandler = new HttpResourceHandler(httpHandlers, handlerHooks, urlRewriter); this.handlerContext = new BasicHandlerContext(this.resourceHandler); this.httpChunkLimit = httpChunkLimit; this.pipelineModifier = null; } /** * Initialize NettyHttpService. * @param bindAddress Address for the service to bind to. * @param bossThreadPoolSize Size of the boss thread pool. * @param workerThreadPoolSize Size of the worker thread pool. * @param execThreadPoolSize Size of the thread pool for the executor. * @param execThreadKeepAliveSecs maximum time that excess idle threads will wait for new tasks before terminating. * @param channelConfigs Configurations for the server socket channel. * @param rejectedExecutionHandler rejection policy for executor. * @param urlRewriter URLRewriter to rewrite incoming URLs. * @param httpHandlers HttpHandlers to handle the calls. * @param handlerHooks Hooks to be called before/after request processing by httpHandlers. * @param pipelineModifier Function used to modify the pipeline. 
*/ private NettyHttpService(InetSocketAddress bindAddress, int bossThreadPoolSize, int workerThreadPoolSize, int execThreadPoolSize, long execThreadKeepAliveSecs, Map<String, Object> channelConfigs, RejectedExecutionHandler rejectedExecutionHandler, URLRewriter urlRewriter, Iterable<? extends HttpHandler> httpHandlers, Iterable<? extends HandlerHook> handlerHooks, int httpChunkLimit, Function<ChannelPipeline, ChannelPipeline> pipelineModifier) { this.bindAddress = bindAddress; this.bossThreadPoolSize = bossThreadPoolSize; this.workerThreadPoolSize = workerThreadPoolSize; this.execThreadPoolSize = execThreadPoolSize; this.execThreadKeepAliveSecs = execThreadKeepAliveSecs; this.channelConfigs = ImmutableMap.copyOf(channelConfigs); this.rejectedExecutionHandler = rejectedExecutionHandler; this.channelGroup = new DefaultChannelGroup(); this.resourceHandler = new HttpResourceHandler(httpHandlers, handlerHooks, urlRewriter); this.handlerContext = new BasicHandlerContext(this.resourceHandler); this.httpChunkLimit = httpChunkLimit; this.pipelineModifier = pipelineModifier; } /** * Create Execution handlers with threadPoolExecutor. * * @param threadPoolSize size of threadPool * @param threadKeepAliveSecs maximum time that excess idle threads will wait for new tasks before terminating. * @return instance of {@code ExecutionHandler}. 
*/ private ExecutionHandler createExecutionHandler(int threadPoolSize, long threadKeepAliveSecs) { ThreadFactory threadFactory = new ThreadFactory() { private final ThreadGroup threadGroup = new ThreadGroup("netty-executor-thread"); private final AtomicLong count = new AtomicLong(0); @Override public Thread newThread(Runnable r) { Thread t = new Thread(threadGroup, r, String.format("executor-%d", count.getAndIncrement())); t.setDaemon(true); return t; } }; //Create ExecutionHandler ThreadPoolExecutor threadPoolExecutor = new OrderedMemoryAwareThreadPoolExecutor(threadPoolSize, 0, 0, threadKeepAliveSecs, TimeUnit.SECONDS, threadFactory); threadPoolExecutor.setRejectedExecutionHandler(rejectedExecutionHandler); return new ExecutionHandler(threadPoolExecutor); } /** * Bootstrap the pipeline. * <ul> * <li>Create Execution handler</li> * <li>Setup Http resource handler</li> * <li>Setup the netty pipeline</li> * </ul> * * @param threadPoolSize Size of threadpool in threadpoolExecutor * @param threadKeepAliveSecs maximum time that excess idle threads will wait for new tasks before terminating. */ private void bootStrap(int threadPoolSize, long threadKeepAliveSecs) { final ExecutionHandler executionHandler = (threadPoolSize) > 0 ? 
createExecutionHandler(threadPoolSize, threadKeepAliveSecs) : null; Executor bossExecutor = Executors.newFixedThreadPool(bossThreadPoolSize, new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("netty-boss-thread") .build()); Executor workerExecutor = Executors.newFixedThreadPool(workerThreadPoolSize, new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("netty-worker-thread") .build()); //Server bootstrap with default worker threads (2 * number of cores) bootstrap = new ServerBootstrap(new NioServerSocketChannelFactory(bossExecutor, bossThreadPoolSize, workerExecutor, workerThreadPoolSize)); bootstrap.setOptions(channelConfigs); resourceHandler.init(handlerContext); final ChannelUpstreamHandler connectionTracker = new SimpleChannelUpstreamHandler() { @Override public void handleUpstream(ChannelHandlerContext ctx, ChannelEvent e) throws Exception { channelGroup.add(e.getChannel()); super.handleUpstream(ctx, e); } }; bootstrap.setPipelineFactory(new ChannelPipelineFactory() { @Override public ChannelPipeline getPipeline() throws Exception { ChannelPipeline pipeline = Channels.pipeline(); pipeline.addLast("tracker", connectionTracker); pipeline.addLast("compressor", new HttpContentCompressor()); pipeline.addLast("encoder", new HttpResponseEncoder()); pipeline.addLast("decoder", new HttpRequestDecoder()); pipeline.addLast("router", new RequestRouter(resourceHandler, httpChunkLimit)); if (executionHandler != null) { pipeline.addLast("executor", executionHandler); } pipeline.addLast("dispatcher", new HttpDispatcher()); if (pipelineModifier != null) { pipeline = pipelineModifier.apply(pipeline); } return pipeline; } }); } public static Builder builder() { return new Builder(); } @Override protected void startUp() throws Exception { LOG.info("Starting service on address {}...", bindAddress); bootStrap(execThreadPoolSize, execThreadKeepAliveSecs); Channel channel = bootstrap.bind(bindAddress); channelGroup.add(channel); bindAddress = ((InetSocketAddress) 
channel.getLocalAddress()); LOG.info("Started service on address {}", bindAddress); } /** * @return port where the service is running. */ public InetSocketAddress getBindAddress() { return bindAddress; } @Override protected void shutDown() throws Exception { LOG.info("Stopping service on address {}...", bindAddress); bootstrap.shutdown(); try { if (!channelGroup.close().await(CLOSE_CHANNEL_TIMEOUT, TimeUnit.SECONDS)) { LOG.warn("Timeout when closing all channels."); } } finally { resourceHandler.destroy(handlerContext); bootstrap.releaseExternalResources(); } LOG.info("Done stopping service on address {}", bindAddress); } /** * Builder to help create the NettyHttpService. */ public static class Builder { private static final int DEFAULT_BOSS_THREAD_POOL_SIZE = 1; private static final int DEFAULT_WORKER_THREAD_POOL_SIZE = 10; private static final int DEFAULT_CONNECTION_BACKLOG = 1000; private static final int DEFAULT_EXEC_HANDLER_THREAD_POOL_SIZE = 60; private static final long DEFAULT_EXEC_HANDLER_THREAD_KEEP_ALIVE_TIME_SECS = 60L; private static final RejectedExecutionHandler DEFAULT_REJECTED_EXECUTION_HANDLER = new ThreadPoolExecutor.CallerRunsPolicy(); private static final int DEFAULT_HTTP_CHUNK_LIMIT = 150 * 1024 * 1024; private Iterable<? extends HttpHandler> handlers; private Iterable<? extends HandlerHook> handlerHooks = ImmutableList.of(); private URLRewriter urlRewriter = null; private int bossThreadPoolSize; private int workerThreadPoolSize; private int execThreadPoolSize; private String host; private int port; private long execThreadKeepAliveSecs; private RejectedExecutionHandler rejectedExecutionHandler; private Map<String, Object> channelConfigs; private int httpChunkLimit; private Function<ChannelPipeline, ChannelPipeline> pipelineModifier; //Private constructor to prevent instantiating Builder instance directly. 
private Builder() { bossThreadPoolSize = DEFAULT_BOSS_THREAD_POOL_SIZE; workerThreadPoolSize = DEFAULT_WORKER_THREAD_POOL_SIZE; execThreadPoolSize = DEFAULT_EXEC_HANDLER_THREAD_POOL_SIZE; execThreadKeepAliveSecs = DEFAULT_EXEC_HANDLER_THREAD_KEEP_ALIVE_TIME_SECS; rejectedExecutionHandler = DEFAULT_REJECTED_EXECUTION_HANDLER; httpChunkLimit = DEFAULT_HTTP_CHUNK_LIMIT; port = 0; channelConfigs = Maps.newHashMap(); channelConfigs.put("backlog", DEFAULT_CONNECTION_BACKLOG); } /** * Modify the pipeline upon build by applying the function. * @param function Function that modifies and returns a pipeline. * @return */ public Builder modifyChannelPipeline(Function<ChannelPipeline, ChannelPipeline> function) { this.pipelineModifier = function; return this; } /** * Add HttpHandlers that service the request. * * @param handlers Iterable of HttpHandlers. * @return instance of {@code Builder}. */ public Builder addHttpHandlers(Iterable<? extends HttpHandler> handlers) { this.handlers = handlers; return this; } /** * Set HandlerHooks to be executed pre and post handler calls. They are executed in the same order as specified * by the iterable. * * @param handlerHooks Iterable of HandlerHooks. * @return an instance of {@code Builder}. */ public Builder setHandlerHooks(Iterable<? extends HandlerHook> handlerHooks) { this.handlerHooks = handlerHooks; return this; } /** * Set URLRewriter to re-write URL of an incoming request before any handlers or their hooks are called. * * @param urlRewriter instance of URLRewriter. * @return an instance of {@code Builder}. */ public Builder setUrlRewriter(URLRewriter urlRewriter) { this.urlRewriter = urlRewriter; return this; } /** * Set size of bossThreadPool in netty default value is 1 if it is not set. * * @param bossThreadPoolSize size of bossThreadPool. * @return an instance of {@code Builder}. 
*/ public Builder setBossThreadPoolSize(int bossThreadPoolSize) { this.bossThreadPoolSize = bossThreadPoolSize; return this; } /** * Set size of workerThreadPool in netty default value is 10 if it is not set. * * @param workerThreadPoolSize size of workerThreadPool. * @return an instance of {@code Builder}. */ public Builder setWorkerThreadPoolSize(int workerThreadPoolSize) { this.workerThreadPoolSize = workerThreadPoolSize; return this; } /** * Set size of backlog in netty service - size of accept queue of the TCP stack. * * @param connectionBacklog backlog in netty server. Default value is 1000. * @return an instance of {@code Builder}. */ public Builder setConnectionBacklog(int connectionBacklog) { channelConfigs.put("backlog", connectionBacklog); return this; } /** * Sets channel configuration for the the netty service. * * @param key Name of the configuration. * @param value Value of the configuration. * @return an instance of {@code Builder}. * @see org.jboss.netty.channel.ChannelConfig * @see org.jboss.netty.channel.socket.ServerSocketChannelConfig */ public Builder setChannelConfig(String key, Object value) { channelConfigs.put(key, value); return this; } /** * Set size of executorThreadPool in netty default value is 60 if it is not set. * If the size is {@code 0}, then no executor will be used, hence calls to {@link HttpHandler} would be made from * worker threads directly. * * @param execThreadPoolSize size of workerThreadPool. * @return an instance of {@code Builder}. */ public Builder setExecThreadPoolSize(int execThreadPoolSize) { this.execThreadPoolSize = execThreadPoolSize; return this; } /** * Set threadKeepAliveSeconds - maximum time that excess idle threads will wait for new tasks before terminating. * Default value is 60 seconds. * * @param threadKeepAliveSecs thread keep alive seconds. * @return an instance of {@code Builder}. 
*/ public Builder setExecThreadKeepAliveSeconds(long threadKeepAliveSecs) { this.execThreadKeepAliveSecs = threadKeepAliveSecs; return this; } /** * Set RejectedExecutionHandler - rejection policy for executor. * * @param rejectedExecutionHandler rejectionExecutionHandler. * @return an instance of {@code Builder}. */ public Builder setRejectedExecutionHandler(RejectedExecutionHandler rejectedExecutionHandler) { this.rejectedExecutionHandler = rejectedExecutionHandler; return this; } /** * Set the port on which the service should listen to. * By default the service will run on a random port. * * @param port port on which the service should listen to. * @return instance of {@code Builder}. */ public Builder setPort(int port) { this.port = port; return this; } /** * Set the bindAddress for the service. Default value is localhost. * * @param host bindAddress for the service. * @return instance of {@code Builder}. */ public Builder setHost(String host) { this.host = host; return this; } public Builder setHttpChunkLimit(int value) { this.httpChunkLimit = value; return this; } /** * @return instance of {@code NettyHttpService} */ public NettyHttpService build() { InetSocketAddress bindAddress; if (host == null) { bindAddress = new InetSocketAddress("localhost", port); } else { bindAddress = new InetSocketAddress(host, port); } return new NettyHttpService(bindAddress, bossThreadPoolSize, workerThreadPoolSize, execThreadPoolSize, execThreadKeepAliveSecs, channelConfigs, rejectedExecutionHandler, urlRewriter, handlers, handlerHooks, httpChunkLimit, pipelineModifier); } } }
src/main/java/com/continuuity/http/NettyHttpService.java
/** * Copyright 2012-2014 Continuuity, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.continuuity.http; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.util.concurrent.AbstractIdleService; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.jboss.netty.bootstrap.ServerBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelEvent; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.channel.ChannelUpstreamHandler; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.group.ChannelGroup; import org.jboss.netty.channel.group.DefaultChannelGroup; import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; import org.jboss.netty.handler.codec.http.HttpContentCompressor; import org.jboss.netty.handler.codec.http.HttpRequestDecoder; import org.jboss.netty.handler.codec.http.HttpResponseEncoder; import org.jboss.netty.handler.execution.ExecutionHandler; import org.jboss.netty.handler.execution.OrderedMemoryAwareThreadPoolExecutor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetSocketAddress; import java.util.ArrayList; 
import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; /** * Webservice implemented using the netty framework. Implements Guava's Service interface to manage the states * of the webservice. */ public final class NettyHttpService extends AbstractIdleService { private static final Logger LOG = LoggerFactory.getLogger(NettyHttpService.class); private static final int CLOSE_CHANNEL_TIMEOUT = 5; private final int bossThreadPoolSize; private final int workerThreadPoolSize; private final int execThreadPoolSize; private final long execThreadKeepAliveSecs; private final Map<String, Object> channelConfigs; private final RejectedExecutionHandler rejectedExecutionHandler; private final HandlerContext handlerContext; private final ChannelGroup channelGroup; private final HttpResourceHandler resourceHandler; private final ArrayList<Function<ChannelPipeline, ChannelPipeline>> pipelineModifiers; private ServerBootstrap bootstrap; private InetSocketAddress bindAddress; private int httpChunkLimit; /** * Initialize NettyHttpService. * @param bindAddress Address for the service to bind to. * @param bossThreadPoolSize Size of the boss thread pool. * @param workerThreadPoolSize Size of the worker thread pool. * @param execThreadPoolSize Size of the thread pool for the executor. * @param execThreadKeepAliveSecs maximum time that excess idle threads will wait for new tasks before terminating. * @param channelConfigs Configurations for the server socket channel. * @param rejectedExecutionHandler rejection policy for executor. * @param urlRewriter URLRewriter to rewrite incoming URLs. * @param httpHandlers HttpHandlers to handle the calls. 
* @param handlerHooks Hooks to be called before/after request processing by httpHandlers. */ public NettyHttpService(InetSocketAddress bindAddress, int bossThreadPoolSize, int workerThreadPoolSize, int execThreadPoolSize, long execThreadKeepAliveSecs, Map<String, Object> channelConfigs, RejectedExecutionHandler rejectedExecutionHandler, URLRewriter urlRewriter, Iterable<? extends HttpHandler> httpHandlers, Iterable<? extends HandlerHook> handlerHooks, int httpChunkLimit) { this.bindAddress = bindAddress; this.bossThreadPoolSize = bossThreadPoolSize; this.workerThreadPoolSize = workerThreadPoolSize; this.execThreadPoolSize = execThreadPoolSize; this.execThreadKeepAliveSecs = execThreadKeepAliveSecs; this.channelConfigs = ImmutableMap.copyOf(channelConfigs); this.rejectedExecutionHandler = rejectedExecutionHandler; this.channelGroup = new DefaultChannelGroup(); this.resourceHandler = new HttpResourceHandler(httpHandlers, handlerHooks, urlRewriter); this.handlerContext = new BasicHandlerContext(this.resourceHandler); this.httpChunkLimit = httpChunkLimit; this.pipelineModifiers = null; } /** * Initialize NettyHttpService. * @param bindAddress Address for the service to bind to. * @param bossThreadPoolSize Size of the boss thread pool. * @param workerThreadPoolSize Size of the worker thread pool. * @param execThreadPoolSize Size of the thread pool for the executor. * @param execThreadKeepAliveSecs maximum time that excess idle threads will wait for new tasks before terminating. * @param channelConfigs Configurations for the server socket channel. * @param rejectedExecutionHandler rejection policy for executor. * @param urlRewriter URLRewriter to rewrite incoming URLs. * @param httpHandlers HttpHandlers to handle the calls. * @param handlerHooks Hooks to be called before/after request processing by httpHandlers. * @param pipelineModifiers List of functions used to modify the pipeline. 
*/ public NettyHttpService(InetSocketAddress bindAddress, int bossThreadPoolSize, int workerThreadPoolSize, int execThreadPoolSize, long execThreadKeepAliveSecs, Map<String, Object> channelConfigs, RejectedExecutionHandler rejectedExecutionHandler, URLRewriter urlRewriter, Iterable<? extends HttpHandler> httpHandlers, Iterable<? extends HandlerHook> handlerHooks, int httpChunkLimit, ArrayList<Function<ChannelPipeline, ChannelPipeline>> pipelineModifiers) { this.bindAddress = bindAddress; this.bossThreadPoolSize = bossThreadPoolSize; this.workerThreadPoolSize = workerThreadPoolSize; this.execThreadPoolSize = execThreadPoolSize; this.execThreadKeepAliveSecs = execThreadKeepAliveSecs; this.channelConfigs = ImmutableMap.copyOf(channelConfigs); this.rejectedExecutionHandler = rejectedExecutionHandler; this.channelGroup = new DefaultChannelGroup(); this.resourceHandler = new HttpResourceHandler(httpHandlers, handlerHooks, urlRewriter); this.handlerContext = new BasicHandlerContext(this.resourceHandler); this.httpChunkLimit = httpChunkLimit; this.pipelineModifiers = pipelineModifiers; } /** * Create Execution handlers with threadPoolExecutor. * * @param threadPoolSize size of threadPool * @param threadKeepAliveSecs maximum time that excess idle threads will wait for new tasks before terminating. * @return instance of {@code ExecutionHandler}. 
*/ private ExecutionHandler createExecutionHandler(int threadPoolSize, long threadKeepAliveSecs) { ThreadFactory threadFactory = new ThreadFactory() { private final ThreadGroup threadGroup = new ThreadGroup("netty-executor-thread"); private final AtomicLong count = new AtomicLong(0); @Override public Thread newThread(Runnable r) { Thread t = new Thread(threadGroup, r, String.format("executor-%d", count.getAndIncrement())); t.setDaemon(true); return t; } }; //Create ExecutionHandler ThreadPoolExecutor threadPoolExecutor = new OrderedMemoryAwareThreadPoolExecutor(threadPoolSize, 0, 0, threadKeepAliveSecs, TimeUnit.SECONDS, threadFactory); threadPoolExecutor.setRejectedExecutionHandler(rejectedExecutionHandler); return new ExecutionHandler(threadPoolExecutor); } /** * Bootstrap the pipeline. * <ul> * <li>Create Execution handler</li> * <li>Setup Http resource handler</li> * <li>Setup the netty pipeline</li> * </ul> * * @param threadPoolSize Size of threadpool in threadpoolExecutor * @param threadKeepAliveSecs maximum time that excess idle threads will wait for new tasks before terminating. */ private void bootStrap(int threadPoolSize, long threadKeepAliveSecs) { final ExecutionHandler executionHandler = (threadPoolSize) > 0 ? 
createExecutionHandler(threadPoolSize, threadKeepAliveSecs) : null; Executor bossExecutor = Executors.newFixedThreadPool(bossThreadPoolSize, new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("netty-boss-thread") .build()); Executor workerExecutor = Executors.newFixedThreadPool(workerThreadPoolSize, new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("netty-worker-thread") .build()); //Server bootstrap with default worker threads (2 * number of cores) bootstrap = new ServerBootstrap(new NioServerSocketChannelFactory(bossExecutor, bossThreadPoolSize, workerExecutor, workerThreadPoolSize)); bootstrap.setOptions(channelConfigs); resourceHandler.init(handlerContext); final ChannelUpstreamHandler connectionTracker = new SimpleChannelUpstreamHandler() { @Override public void handleUpstream(ChannelHandlerContext ctx, ChannelEvent e) throws Exception { channelGroup.add(e.getChannel()); super.handleUpstream(ctx, e); } }; bootstrap.setPipelineFactory(new ChannelPipelineFactory() { @Override public ChannelPipeline getPipeline() throws Exception { ChannelPipeline pipeline = Channels.pipeline(); pipeline.addLast("tracker", connectionTracker); pipeline.addLast("compressor", new HttpContentCompressor()); pipeline.addLast("encoder", new HttpResponseEncoder()); pipeline.addLast("decoder", new HttpRequestDecoder()); pipeline.addLast("router", new RequestRouter(resourceHandler, httpChunkLimit)); if (executionHandler != null) { pipeline.addLast("executor", executionHandler); } pipeline.addLast("dispatcher", new HttpDispatcher()); if (pipelineModifiers != null) { for(Function<ChannelPipeline, ChannelPipeline> modifier : pipelineModifiers) { pipeline = modifier.apply(pipeline); } } return pipeline; } }); } public static Builder builder() { return new Builder(); } @Override protected void startUp() throws Exception { LOG.info("Starting service on address {}...", bindAddress); bootStrap(execThreadPoolSize, execThreadKeepAliveSecs); Channel channel = 
bootstrap.bind(bindAddress); channelGroup.add(channel); bindAddress = ((InetSocketAddress) channel.getLocalAddress()); LOG.info("Started service on address {}", bindAddress); } /** * @return port where the service is running. */ public InetSocketAddress getBindAddress() { return bindAddress; } @Override protected void shutDown() throws Exception { LOG.info("Stopping service on address {}...", bindAddress); bootstrap.shutdown(); try { if (!channelGroup.close().await(CLOSE_CHANNEL_TIMEOUT, TimeUnit.SECONDS)) { LOG.warn("Timeout when closing all channels."); } } finally { resourceHandler.destroy(handlerContext); bootstrap.releaseExternalResources(); } LOG.info("Done stopping service on address {}", bindAddress); } /** * Builder to help create the NettyHttpService. */ public static class Builder { private static final int DEFAULT_BOSS_THREAD_POOL_SIZE = 1; private static final int DEFAULT_WORKER_THREAD_POOL_SIZE = 10; private static final int DEFAULT_CONNECTION_BACKLOG = 1000; private static final int DEFAULT_EXEC_HANDLER_THREAD_POOL_SIZE = 60; private static final long DEFAULT_EXEC_HANDLER_THREAD_KEEP_ALIVE_TIME_SECS = 60L; private static final RejectedExecutionHandler DEFAULT_REJECTED_EXECUTION_HANDLER = new ThreadPoolExecutor.CallerRunsPolicy(); private static final int DEFAULT_HTTP_CHUNK_LIMIT = 150 * 1024 * 1024; private Iterable<? extends HttpHandler> handlers; private Iterable<? extends HandlerHook> handlerHooks = ImmutableList.of(); private URLRewriter urlRewriter = null; private int bossThreadPoolSize; private int workerThreadPoolSize; private int execThreadPoolSize; private String host; private int port; private long execThreadKeepAliveSecs; private RejectedExecutionHandler rejectedExecutionHandler; private Map<String, Object> channelConfigs; private int httpChunkLimit; private ArrayList<Function<ChannelPipeline, ChannelPipeline>> pipelineModifiers; //Private constructor to prevent instantiating Builder instance directly. 
private Builder() { bossThreadPoolSize = DEFAULT_BOSS_THREAD_POOL_SIZE; workerThreadPoolSize = DEFAULT_WORKER_THREAD_POOL_SIZE; execThreadPoolSize = DEFAULT_EXEC_HANDLER_THREAD_POOL_SIZE; execThreadKeepAliveSecs = DEFAULT_EXEC_HANDLER_THREAD_KEEP_ALIVE_TIME_SECS; rejectedExecutionHandler = DEFAULT_REJECTED_EXECUTION_HANDLER; httpChunkLimit = DEFAULT_HTTP_CHUNK_LIMIT; port = 0; channelConfigs = Maps.newHashMap(); channelConfigs.put("backlog", DEFAULT_CONNECTION_BACKLOG); pipelineModifiers = new ArrayList<Function<ChannelPipeline, ChannelPipeline>>(); } /** * Modify the pipeline upon build by applying the function. * @param function Function that modifies and returns a pipeline. * @return */ public Builder modifyChannelPipeline(Function<ChannelPipeline, ChannelPipeline> function) { pipelineModifiers.add(function); return this; } /** * Add HttpHandlers that service the request. * * @param handlers Iterable of HttpHandlers. * @return instance of {@code Builder}. */ public Builder addHttpHandlers(Iterable<? extends HttpHandler> handlers) { this.handlers = handlers; return this; } /** * Set HandlerHooks to be executed pre and post handler calls. They are executed in the same order as specified * by the iterable. * * @param handlerHooks Iterable of HandlerHooks. * @return an instance of {@code Builder}. */ public Builder setHandlerHooks(Iterable<? extends HandlerHook> handlerHooks) { this.handlerHooks = handlerHooks; return this; } /** * Set URLRewriter to re-write URL of an incoming request before any handlers or their hooks are called. * * @param urlRewriter instance of URLRewriter. * @return an instance of {@code Builder}. */ public Builder setUrlRewriter(URLRewriter urlRewriter) { this.urlRewriter = urlRewriter; return this; } /** * Set size of bossThreadPool in netty default value is 1 if it is not set. * * @param bossThreadPoolSize size of bossThreadPool. * @return an instance of {@code Builder}. 
*/ public Builder setBossThreadPoolSize(int bossThreadPoolSize) { this.bossThreadPoolSize = bossThreadPoolSize; return this; } /** * Set size of workerThreadPool in netty default value is 10 if it is not set. * * @param workerThreadPoolSize size of workerThreadPool. * @return an instance of {@code Builder}. */ public Builder setWorkerThreadPoolSize(int workerThreadPoolSize) { this.workerThreadPoolSize = workerThreadPoolSize; return this; } /** * Set size of backlog in netty service - size of accept queue of the TCP stack. * * @param connectionBacklog backlog in netty server. Default value is 1000. * @return an instance of {@code Builder}. */ public Builder setConnectionBacklog(int connectionBacklog) { channelConfigs.put("backlog", connectionBacklog); return this; } /** * Sets channel configuration for the the netty service. * * @param key Name of the configuration. * @param value Value of the configuration. * @return an instance of {@code Builder}. * @see org.jboss.netty.channel.ChannelConfig * @see org.jboss.netty.channel.socket.ServerSocketChannelConfig */ public Builder setChannelConfig(String key, Object value) { channelConfigs.put(key, value); return this; } /** * Set size of executorThreadPool in netty default value is 60 if it is not set. * If the size is {@code 0}, then no executor will be used, hence calls to {@link HttpHandler} would be made from * worker threads directly. * * @param execThreadPoolSize size of workerThreadPool. * @return an instance of {@code Builder}. */ public Builder setExecThreadPoolSize(int execThreadPoolSize) { this.execThreadPoolSize = execThreadPoolSize; return this; } /** * Set threadKeepAliveSeconds - maximum time that excess idle threads will wait for new tasks before terminating. * Default value is 60 seconds. * * @param threadKeepAliveSecs thread keep alive seconds. * @return an instance of {@code Builder}. 
*/ public Builder setExecThreadKeepAliveSeconds(long threadKeepAliveSecs) { this.execThreadKeepAliveSecs = threadKeepAliveSecs; return this; } /** * Set RejectedExecutionHandler - rejection policy for executor. * * @param rejectedExecutionHandler rejectionExecutionHandler. * @return an instance of {@code Builder}. */ public Builder setRejectedExecutionHandler(RejectedExecutionHandler rejectedExecutionHandler) { this.rejectedExecutionHandler = rejectedExecutionHandler; return this; } /** * Set the port on which the service should listen to. * By default the service will run on a random port. * * @param port port on which the service should listen to. * @return instance of {@code Builder}. */ public Builder setPort(int port) { this.port = port; return this; } /** * Set the bindAddress for the service. Default value is localhost. * * @param host bindAddress for the service. * @return instance of {@code Builder}. */ public Builder setHost(String host) { this.host = host; return this; } public Builder setHttpChunkLimit(int value) { this.httpChunkLimit = value; return this; } /** * @return instance of {@code NettyHttpService} */ public NettyHttpService build() { InetSocketAddress bindAddress; if (host == null) { bindAddress = new InetSocketAddress("localhost", port); } else { bindAddress = new InetSocketAddress(host, port); } return new NettyHttpService(bindAddress, bossThreadPoolSize, workerThreadPoolSize, execThreadPoolSize, execThreadKeepAliveSecs, channelConfigs, rejectedExecutionHandler, urlRewriter, handlers, handlerHooks, httpChunkLimit, pipelineModifiers); } } }
Removed Array of functions to modify. Allow only a single function
src/main/java/com/continuuity/http/NettyHttpService.java
Removed Array of functions to modify. Allow only a single function
<ide><path>rc/main/java/com/continuuity/http/NettyHttpService.java <ide> import org.slf4j.LoggerFactory; <ide> <ide> import java.net.InetSocketAddress; <del>import java.util.ArrayList; <ide> import java.util.Map; <ide> import java.util.concurrent.Executor; <ide> import java.util.concurrent.Executors; <ide> private final HandlerContext handlerContext; <ide> private final ChannelGroup channelGroup; <ide> private final HttpResourceHandler resourceHandler; <del> private final ArrayList<Function<ChannelPipeline, ChannelPipeline>> pipelineModifiers; <add> private final Function<ChannelPipeline, ChannelPipeline> pipelineModifier; <ide> <ide> <ide> private ServerBootstrap bootstrap; <ide> this.resourceHandler = new HttpResourceHandler(httpHandlers, handlerHooks, urlRewriter); <ide> this.handlerContext = new BasicHandlerContext(this.resourceHandler); <ide> this.httpChunkLimit = httpChunkLimit; <del> this.pipelineModifiers = null; <add> this.pipelineModifier = null; <ide> } <ide> <ide> /** <ide> * @param urlRewriter URLRewriter to rewrite incoming URLs. <ide> * @param httpHandlers HttpHandlers to handle the calls. <ide> * @param handlerHooks Hooks to be called before/after request processing by httpHandlers. <del> * @param pipelineModifiers List of functions used to modify the pipeline. <del> */ <del> public NettyHttpService(InetSocketAddress bindAddress, int bossThreadPoolSize, int workerThreadPoolSize, <add> * @param pipelineModifier Function used to modify the pipeline. <add> */ <add> private NettyHttpService(InetSocketAddress bindAddress, int bossThreadPoolSize, int workerThreadPoolSize, <ide> int execThreadPoolSize, long execThreadKeepAliveSecs, <ide> Map<String, Object> channelConfigs, <ide> RejectedExecutionHandler rejectedExecutionHandler, URLRewriter urlRewriter, <ide> Iterable<? extends HttpHandler> httpHandlers, <ide> Iterable<? 
extends HandlerHook> handlerHooks, int httpChunkLimit, <del> ArrayList<Function<ChannelPipeline, ChannelPipeline>> pipelineModifiers) { <add> Function<ChannelPipeline, ChannelPipeline> pipelineModifier) { <ide> this.bindAddress = bindAddress; <ide> this.bossThreadPoolSize = bossThreadPoolSize; <ide> this.workerThreadPoolSize = workerThreadPoolSize; <ide> this.resourceHandler = new HttpResourceHandler(httpHandlers, handlerHooks, urlRewriter); <ide> this.handlerContext = new BasicHandlerContext(this.resourceHandler); <ide> this.httpChunkLimit = httpChunkLimit; <del> this.pipelineModifiers = pipelineModifiers; <add> this.pipelineModifier = pipelineModifier; <ide> } <ide> <ide> /** <ide> } <ide> pipeline.addLast("dispatcher", new HttpDispatcher()); <ide> <del> if (pipelineModifiers != null) { <del> for(Function<ChannelPipeline, ChannelPipeline> modifier : pipelineModifiers) { <del> pipeline = modifier.apply(pipeline); <del> } <add> if (pipelineModifier != null) { <add> pipeline = pipelineModifier.apply(pipeline); <ide> } <ide> <ide> return pipeline; <ide> private RejectedExecutionHandler rejectedExecutionHandler; <ide> private Map<String, Object> channelConfigs; <ide> private int httpChunkLimit; <del> private ArrayList<Function<ChannelPipeline, ChannelPipeline>> pipelineModifiers; <add> private Function<ChannelPipeline, ChannelPipeline> pipelineModifier; <ide> <ide> //Private constructor to prevent instantiating Builder instance directly. 
<ide> private Builder() { <ide> port = 0; <ide> channelConfigs = Maps.newHashMap(); <ide> channelConfigs.put("backlog", DEFAULT_CONNECTION_BACKLOG); <del> pipelineModifiers = new ArrayList<Function<ChannelPipeline, ChannelPipeline>>(); <ide> } <ide> <ide> /** <ide> * @return <ide> */ <ide> public Builder modifyChannelPipeline(Function<ChannelPipeline, ChannelPipeline> function) { <del> pipelineModifiers.add(function); <add> this.pipelineModifier = function; <ide> return this; <ide> } <ide> <ide> <ide> return new NettyHttpService(bindAddress, bossThreadPoolSize, workerThreadPoolSize, <ide> execThreadPoolSize, execThreadKeepAliveSecs, channelConfigs, rejectedExecutionHandler, <del> urlRewriter, handlers, handlerHooks, httpChunkLimit, pipelineModifiers); <add> urlRewriter, handlers, handlerHooks, httpChunkLimit, pipelineModifier); <ide> } <ide> } <ide> }
Java
mit
6d8275b7a95430be65c91d13ec55a7747d1d4ab9
0
Moudoux/EMC
package me.deftware.client.framework.wrappers.entity; import net.minecraft.client.Minecraft; import net.minecraft.client.entity.EntityOtherPlayerMP; import net.minecraft.client.entity.EntityPlayerSP; import net.minecraft.entity.*; import net.minecraft.entity.EntityFlying; import net.minecraft.entity.boss.*; import net.minecraft.entity.item.EntityItem; import net.minecraft.entity.monster.*; import net.minecraft.entity.passive.*; import net.minecraft.entity.player.EntityPlayer; import java.awt.*; public class IEntity { private Entity entity; public IEntity(Entity e) { entity = e; } public Entity getEntity() { return entity; } public boolean isOnGround() { return ((EntityLivingBase) entity).onGround; } public void setStepHeight(float height) { entity.stepHeight = height; } public float getStepHeight() { return entity.stepHeight; } public Color getEntityColor() { if ((entity instanceof EntityAnimal)) { return Color.white; } if ((entity instanceof EntityMob)) { return Color.red; } if ((entity instanceof EntitySlime)) { return Color.green; } if ((entity instanceof EntityVillager)) { return new Color(245, 245, 220); } if ((entity instanceof EntityBat)) { return Color.BLACK; } if ((entity instanceof EntitySquid)) { return Color.PINK; } return Color.white; } public float getDistanceToPlayer() { return entity.getDistance(Minecraft.getMinecraft().player); } public String getName() { if (entity instanceof EntityPlayer) { return ((EntityPlayer) entity).getGameProfile().getName(); } return ""; } public boolean isDead() { return entity.isDead; } public boolean isMod() { return (entity instanceof EntityMob || entity instanceof EntityLiving) && !(entity instanceof EntityPlayer) && !(entity instanceof EntityItem); } public boolean isPlayer() { return entity instanceof EntityPlayer; } public boolean isItem() { return entity instanceof EntityItem; } public IItemEntity getIItemEntity() { return new IItemEntity(entity); } public IMob getIMob() { return new IMob(entity); } public IPlayer 
getIPlayer() { return new IPlayer((EntityPlayer) entity); } public float getHealth() { if (entity instanceof EntityLivingBase) { return ((EntityLivingBase) entity).getHealth(); } return 0; } public boolean isPlayerOwned() { if (entity instanceof EntityWolf) { if (((EntityWolf) entity).isOwner(Minecraft.getMinecraft().player)) { return true; } } return false; } public boolean isSleeping() { if (entity instanceof EntityPlayer) { return ((EntityLivingBase) entity).isPlayerSleeping(); } return false; } public boolean isInvisible() { if (entity instanceof EntityPlayer) { return ((EntityLivingBase) entity).isInvisible(); } return false; } public boolean isInvisibleToPlayer() { return entity.isInvisibleToPlayer(Minecraft.getMinecraft().player); } public boolean isSelf() { return entity == Minecraft.getMinecraft().player; } public double getPosX() { return entity.posX; } public double getPosY() { return entity.posY; } public double getPosZ() { return entity.posZ; } public double getPrevPosX() { return entity.prevPosX; } public double getPrevPosY() { return entity.prevPosY; } public double getPrevPosZ() { return entity.prevPosZ; } public double getEyeHeight() { return entity.getEyeHeight(); } public boolean canBeSeen() { return Minecraft.getMinecraft().player.canEntityBeSeen(entity); } public boolean isHostile() { if (entity instanceof EntityBlaze || entity instanceof EntityCreeper || entity instanceof EntityElderGuardian || entity instanceof EntityEndermite || entity instanceof EntityEvoker || entity instanceof EntityGhast || entity instanceof EntityGuardian || entity instanceof EntityHusk || entity instanceof EntityMagmaCube || entity instanceof EntityShulker || entity instanceof EntitySilverfish || entity instanceof EntitySkeleton || entity instanceof EntitySlime || entity instanceof EntitySpider || entity instanceof EntityStray || entity instanceof EntityVex || entity instanceof EntityVindicator || entity instanceof EntityWitch || entity instanceof EntityWitherSkeleton 
|| entity instanceof EntityZombie || entity instanceof EntityZombieVillager || entity instanceof EntityWither) { return true; } else if (entity instanceof EntityChicken) { if (((EntityChicken) entity).chickenJockey) { return true; } } return false; } public boolean instanceOf(EntityType e) { // Generic types and players if (e.equals(EntityType.ENTITY_PLAYER_SP)) { return entity instanceof EntityPlayerSP; } else if (e.equals(EntityType.EntityOtherPlayerMP)) { return entity instanceof EntityOtherPlayerMP; } else if (e.equals(EntityType.ENTITY_PLAYER)) { return entity instanceof EntityPlayer; } else if (e.equals(EntityType.ENTITY_LIVING_BASE)) { return entity instanceof EntityLivingBase; } else if (e.equals(EntityType.ENTITY_LIVING)) { return entity instanceof EntityLiving; } else if (e.equals(EntityType.ENTITY_ITEM)) { return entity instanceof EntityItem; } else if (e.equals(EntityType.ENTITY_PROJECTILE)) { return entity instanceof IProjectile; } // Mobs if (e.equals(EntityType.ENTITY_WOLF)) { return entity instanceof EntityWolf; } else if (e.equals(EntityType.Entity_Ageable)) { return entity instanceof EntityAgeable; } else if (e.equals(EntityType.EntityAmbientCreature)) { return entity instanceof EntityAmbientCreature; } else if (e.equals(EntityType.EntityWaterMob)) { return entity instanceof EntityWaterMob; } else if (e.equals(EntityType.EntityMob)) { return entity instanceof EntityMob; } else if (e.equals(EntityType.EntitySlime) || e.equals(EntityType.ENTITY_SLIME)) { return entity instanceof EntitySlime; } else if (e.equals(EntityType.EntityFlying)) { return entity instanceof EntityFlying; } else if (e.equals(EntityType.EntityGolem)) { return entity instanceof EntityGolem; } else if (e.equals(EntityType.ENTITY_SPIDER)) { return entity instanceof EntitySpider; } else if (e.equals(EntityType.ENTITY_ZOMBIE_PIGMAN)) { return entity instanceof EntityZombie; } else if (e.equals(EntityType.ENTITY_ENDERMAN)) { return entity instanceof EntityEnderman; } else if 
(e.equals(EntityType.ENTITY_WITHER_SKELETON)) { return entity instanceof EntityWitherSkeleton; } else if (e.equals(EntityType.ENTITY_WITHER)) { return entity instanceof EntityWither; } else if (e.equals(EntityType.ENTITY_DRAGON)) { return entity instanceof EntityDragon; } else if (e.equals(EntityType.ENTITY_PHANTOM)) { return entity instanceof EntityPhantom; } else if (e.equals(EntityType.ENTITY_DROWNED)) { return entity instanceof EntityDrowned; } else if (e.equals(EntityType.ENTITY_EVOKER)) { return entity instanceof EntityEvoker; } else if (e.equals(EntityType.ENTITY_STRAY)) { return entity instanceof EntityStray; } else if (e.equals(EntityType.ENTITY_ELDER_GUARDIAN)) { return entity instanceof EntityElderGuardian; } else if (e.equals(EntityType.ENTITY_CREEPER)) { return entity instanceof EntityCreeper; } else if (e.equals(EntityType.ENTITY_VINDICATOR)) { return entity instanceof EntityVindicator; } else if (e.equals(EntityType.ENTITY_ILLUSIONER)) { return entity instanceof EntityIllusionIllager; } else if (e.equals(EntityType.ENTITY_HUSK)) { return entity instanceof EntityHusk; } else if (e.equals(EntityType.ENTITY_ZOMBIE)) { return entity instanceof EntityZombie; } else if (e.equals(EntityType.ENTITY_SKELETON)) { return entity instanceof EntitySkeleton; } else if (e.equals(EntityType.ENTITY_SHULKER)) { return entity instanceof EntityShulker; } else if (e.equals(EntityType.ENTITY_GUARDIAN)) { return entity instanceof EntityGuardian; } else if (e.equals(EntityType.ENTITY_VEX)) { return entity instanceof EntityVex; } else if (e.equals(EntityType.ENTITY_SILVERFISH)) { return entity instanceof EntitySilverfish; } else if (e.equals(EntityType.ENTITY_WITCH)) { return entity instanceof EntityWitch; } else if (e.equals(EntityType.ENTITY_GIANT)) { return entity instanceof EntityGiantZombie; } else if (e.equals(EntityType.ENTITY_BLAZE)) { return entity instanceof EntityBlaze; } else if (e.equals(EntityType.ENTITY_ENDERMITE)) { return entity instanceof EntityEndermite; } 
else if (e.equals(EntityType.ENTITY_GHAST)) { return entity instanceof EntityGhast; } else if (e.equals(EntityType.ENTITY_MAGMA_CUBE)) { return entity instanceof EntityMagmaCube; } else if (e.equals(EntityType.ENTITY_CAVE_SPIDER)) { return entity instanceof EntityCaveSpider; } return false; } public enum EntityType { ENTITY_PLAYER_SP, EntityOtherPlayerMP, ENTITY_PLAYER, EntitySlime, EntityGolem, EntityFlying, EntityMob, EntityWaterMob, ENTITY_WOLF, ENTITY_LIVING_BASE, ENTITY_LIVING, Entity_Ageable, EntityAmbientCreature, ENTITY_ITEM, ENTITY_PROJECTILE, /* * Hostile mobs */ ENTITY_ENDERMAN, ENTITY_ZOMBIE_PIGMAN, ENTITY_SPIDER, ENTITY_WITHER_SKELETON, ENTITY_WITHER, ENTITY_DRAGON, ENTITY_PHANTOM, ENTITY_DROWNED, ENTITY_EVOKER, ENTITY_STRAY, ENTITY_ELDER_GUARDIAN, ENTITY_CREEPER, ENTITY_VINDICATOR, ENTITY_ILLUSIONER, ENTITY_ZOMBIE, ENTITY_HUSK, ENTITY_SKELETON, ENTITY_SHULKER, ENTITY_SLIME, ENTITY_GUARDIAN, ENTITY_VEX, ENTITY_SILVERFISH, ENTITY_WITCH, ENTITY_GIANT, ENTITY_BLAZE, ENTITY_ENDERMITE, ENTITY_GHAST, ENTITY_MAGMA_CUBE, ENTITY_CAVE_SPIDER } }
src/main/java/me/deftware/client/framework/wrappers/entity/IEntity.java
package me.deftware.client.framework.wrappers.entity; import net.minecraft.client.Minecraft; import net.minecraft.client.entity.EntityOtherPlayerMP; import net.minecraft.client.entity.EntityPlayerSP; import net.minecraft.entity.*; import net.minecraft.entity.EntityFlying; import net.minecraft.entity.boss.EntityWither; import net.minecraft.entity.item.EntityItem; import net.minecraft.entity.monster.*; import net.minecraft.entity.passive.*; import net.minecraft.entity.player.EntityPlayer; import java.awt.*; public class IEntity { private Entity entity; public IEntity(Entity e) { entity = e; } public Entity getEntity() { return entity; } public boolean isOnGround() { return ((EntityLivingBase) entity).onGround; } public void setStepHeight(float height) { entity.stepHeight = height; } public float getStepHeight() { return entity.stepHeight; } public Color getEntityColor() { if ((entity instanceof EntityAnimal)) { return Color.white; } if ((entity instanceof EntityMob)) { return Color.red; } if ((entity instanceof EntitySlime)) { return Color.green; } if ((entity instanceof EntityVillager)) { return new Color(245, 245, 220); } if ((entity instanceof EntityBat)) { return Color.BLACK; } if ((entity instanceof EntitySquid)) { return Color.PINK; } return Color.white; } public float getDistanceToPlayer() { return entity.getDistance(Minecraft.getMinecraft().player); } public String getName() { if (entity instanceof EntityPlayer) { return ((EntityPlayer) entity).getGameProfile().getName(); } return ""; } public boolean isDead() { return entity.isDead; } public boolean isMod() { return (entity instanceof EntityMob || entity instanceof EntityLiving) && !(entity instanceof EntityPlayer) && !(entity instanceof EntityItem); } public boolean isPlayer() { return entity instanceof EntityPlayer; } public boolean isItem() { return entity instanceof EntityItem; } public IItemEntity getIItemEntity() { return new IItemEntity(entity); } public IMob getIMob() { return new IMob(entity); } 
public IPlayer getIPlayer() { return new IPlayer((EntityPlayer) entity); } public float getHealth() { if (entity instanceof EntityLivingBase) { return ((EntityLivingBase) entity).getHealth(); } return 0; } public boolean isPlayerOwned() { if (entity instanceof EntityWolf) { if (((EntityWolf) entity).isOwner(Minecraft.getMinecraft().player)) { return true; } } return false; } public boolean isSleeping() { if (entity instanceof EntityPlayer) { return ((EntityLivingBase) entity).isPlayerSleeping(); } return false; } public boolean isInvisible() { if (entity instanceof EntityPlayer) { return ((EntityLivingBase) entity).isInvisible(); } return false; } public boolean isInvisibleToPlayer() { return entity.isInvisibleToPlayer(Minecraft.getMinecraft().player); } public boolean isSelf() { return entity == Minecraft.getMinecraft().player; } public double getPosX() { return entity.posX; } public double getPosY() { return entity.posY; } public double getPosZ() { return entity.posZ; } public double getPrevPosX() { return entity.prevPosX; } public double getPrevPosY() { return entity.prevPosY; } public double getPrevPosZ() { return entity.prevPosZ; } public double getEyeHeight() { return entity.getEyeHeight(); } public boolean canBeSeen() { return Minecraft.getMinecraft().player.canEntityBeSeen(entity); } public boolean isHostile() { if (entity instanceof EntityBlaze || entity instanceof EntityCreeper || entity instanceof EntityElderGuardian || entity instanceof EntityEndermite || entity instanceof EntityEvoker || entity instanceof EntityGhast || entity instanceof EntityGuardian || entity instanceof EntityHusk || entity instanceof EntityMagmaCube || entity instanceof EntityShulker || entity instanceof EntitySilverfish || entity instanceof EntitySkeleton || entity instanceof EntitySlime || entity instanceof EntitySpider || entity instanceof EntityStray || entity instanceof EntityVex || entity instanceof EntityVindicator || entity instanceof EntityWitch || entity instanceof 
EntityWitherSkeleton || entity instanceof EntityZombie || entity instanceof EntityZombieVillager || entity instanceof EntityWither) { return true; } else if (entity instanceof EntityChicken) { if (((EntityChicken) entity).chickenJockey) { return true; } } return false; } public boolean instanceOf(EntityType e) { // Generic types and players if (e.equals(EntityType.ENTITY_PLAYER_SP)) { return entity instanceof EntityPlayerSP; } else if (e.equals(EntityType.EntityOtherPlayerMP)) { return entity instanceof EntityOtherPlayerMP; } else if (e.equals(EntityType.ENTITY_PLAYER)) { return entity instanceof EntityPlayer; } else if (e.equals(EntityType.ENTITY_LIVING_BASE)) { return entity instanceof EntityLivingBase; } else if (e.equals(EntityType.ENTITY_LIVING)) { return entity instanceof EntityLiving; } else if (e.equals(EntityType.ENTITY_ITEM)) { return entity instanceof EntityItem; } // Mobs if (e.equals(EntityType.ENTITY_WOLF)) { return entity instanceof EntityWolf; } else if (e.equals(EntityType.Entity_Ageable)) { return entity instanceof EntityAgeable; } else if (e.equals(EntityType.EntityAmbientCreature)) { return entity instanceof EntityAmbientCreature; } else if (e.equals(EntityType.EntityWaterMob)) { return entity instanceof EntityWaterMob; } else if (e.equals(EntityType.EntityMob)) { return entity instanceof EntityMob; } else if (e.equals(EntityType.EntitySlime)) { return entity instanceof EntitySlime; } else if (e.equals(EntityType.EntityFlying)) { return entity instanceof EntityFlying; } else if (e.equals(EntityType.EntityGolem)) { return entity instanceof EntityGolem; } else if (e.equals(EntityType.ENTITY_SPIDER)) { return entity instanceof EntitySpider; } else if (e.equals(EntityType.ENTITY_SPIDER)) { return entity instanceof EntitySpider; } else if (e.equals(EntityType.ENTITY_ZOMBIE_PIGMAN)) { return entity instanceof EntityZombie; } else if (e.equals(EntityType.ENTITY_ENDERMAN)) { return entity instanceof EntityEnderman; } else if 
(e.equals(EntityType.ENTITY_WITHER_SKELETON)) { return entity instanceof EntityWitherSkeleton; } return false; } public enum EntityType { ENTITY_PLAYER_SP, EntityOtherPlayerMP, ENTITY_PLAYER, EntitySlime, EntityGolem, EntityFlying, EntityMob, EntityWaterMob, ENTITY_WOLF, ENTITY_LIVING_BASE, ENTITY_LIVING, Entity_Ageable, EntityAmbientCreature, ENTITY_ITEM, /* * Hostile mobs */ ENTITY_ENDERMAN, ENTITY_ZOMBIE_PIGMAN, ENTITY_SPIDER, ENTITY_WITHER_SKELETON } }
Add hostile mobs
src/main/java/me/deftware/client/framework/wrappers/entity/IEntity.java
Add hostile mobs
<ide><path>rc/main/java/me/deftware/client/framework/wrappers/entity/IEntity.java <ide> import net.minecraft.client.entity.EntityPlayerSP; <ide> import net.minecraft.entity.*; <ide> import net.minecraft.entity.EntityFlying; <del>import net.minecraft.entity.boss.EntityWither; <add>import net.minecraft.entity.boss.*; <ide> import net.minecraft.entity.item.EntityItem; <ide> import net.minecraft.entity.monster.*; <ide> import net.minecraft.entity.passive.*; <ide> return entity instanceof EntityLiving; <ide> } else if (e.equals(EntityType.ENTITY_ITEM)) { <ide> return entity instanceof EntityItem; <add> } else if (e.equals(EntityType.ENTITY_PROJECTILE)) { <add> return entity instanceof IProjectile; <ide> } <ide> // Mobs <ide> if (e.equals(EntityType.ENTITY_WOLF)) { <ide> return entity instanceof EntityWaterMob; <ide> } else if (e.equals(EntityType.EntityMob)) { <ide> return entity instanceof EntityMob; <del> } else if (e.equals(EntityType.EntitySlime)) { <add> } else if (e.equals(EntityType.EntitySlime) || e.equals(EntityType.ENTITY_SLIME)) { <ide> return entity instanceof EntitySlime; <ide> } else if (e.equals(EntityType.EntityFlying)) { <ide> return entity instanceof EntityFlying; <ide> } else if (e.equals(EntityType.EntityGolem)) { <ide> return entity instanceof EntityGolem; <del> } else if (e.equals(EntityType.ENTITY_SPIDER)) { <del> return entity instanceof EntitySpider; <ide> } else if (e.equals(EntityType.ENTITY_SPIDER)) { <ide> return entity instanceof EntitySpider; <ide> } else if (e.equals(EntityType.ENTITY_ZOMBIE_PIGMAN)) { <ide> return entity instanceof EntityEnderman; <ide> } else if (e.equals(EntityType.ENTITY_WITHER_SKELETON)) { <ide> return entity instanceof EntityWitherSkeleton; <add> } else if (e.equals(EntityType.ENTITY_WITHER)) { <add> return entity instanceof EntityWither; <add> } else if (e.equals(EntityType.ENTITY_DRAGON)) { <add> return entity instanceof EntityDragon; <add> } else if (e.equals(EntityType.ENTITY_PHANTOM)) { <add> return entity 
instanceof EntityPhantom; <add> } else if (e.equals(EntityType.ENTITY_DROWNED)) { <add> return entity instanceof EntityDrowned; <add> } else if (e.equals(EntityType.ENTITY_EVOKER)) { <add> return entity instanceof EntityEvoker; <add> } else if (e.equals(EntityType.ENTITY_STRAY)) { <add> return entity instanceof EntityStray; <add> } else if (e.equals(EntityType.ENTITY_ELDER_GUARDIAN)) { <add> return entity instanceof EntityElderGuardian; <add> } else if (e.equals(EntityType.ENTITY_CREEPER)) { <add> return entity instanceof EntityCreeper; <add> } else if (e.equals(EntityType.ENTITY_VINDICATOR)) { <add> return entity instanceof EntityVindicator; <add> } else if (e.equals(EntityType.ENTITY_ILLUSIONER)) { <add> return entity instanceof EntityIllusionIllager; <add> } else if (e.equals(EntityType.ENTITY_HUSK)) { <add> return entity instanceof EntityHusk; <add> } else if (e.equals(EntityType.ENTITY_ZOMBIE)) { <add> return entity instanceof EntityZombie; <add> } else if (e.equals(EntityType.ENTITY_SKELETON)) { <add> return entity instanceof EntitySkeleton; <add> } else if (e.equals(EntityType.ENTITY_SHULKER)) { <add> return entity instanceof EntityShulker; <add> } else if (e.equals(EntityType.ENTITY_GUARDIAN)) { <add> return entity instanceof EntityGuardian; <add> } else if (e.equals(EntityType.ENTITY_VEX)) { <add> return entity instanceof EntityVex; <add> } else if (e.equals(EntityType.ENTITY_SILVERFISH)) { <add> return entity instanceof EntitySilverfish; <add> } else if (e.equals(EntityType.ENTITY_WITCH)) { <add> return entity instanceof EntityWitch; <add> } else if (e.equals(EntityType.ENTITY_GIANT)) { <add> return entity instanceof EntityGiantZombie; <add> } else if (e.equals(EntityType.ENTITY_BLAZE)) { <add> return entity instanceof EntityBlaze; <add> } else if (e.equals(EntityType.ENTITY_ENDERMITE)) { <add> return entity instanceof EntityEndermite; <add> } else if (e.equals(EntityType.ENTITY_GHAST)) { <add> return entity instanceof EntityGhast; <add> } else if 
(e.equals(EntityType.ENTITY_MAGMA_CUBE)) { <add> return entity instanceof EntityMagmaCube; <add> } else if (e.equals(EntityType.ENTITY_CAVE_SPIDER)) { <add> return entity instanceof EntityCaveSpider; <ide> } <ide> return false; <ide> } <ide> <ide> public enum EntityType { <del> ENTITY_PLAYER_SP, EntityOtherPlayerMP, ENTITY_PLAYER, EntitySlime, EntityGolem, EntityFlying, EntityMob, EntityWaterMob, ENTITY_WOLF, ENTITY_LIVING_BASE, ENTITY_LIVING, Entity_Ageable, EntityAmbientCreature, ENTITY_ITEM, <add> ENTITY_PLAYER_SP, EntityOtherPlayerMP, ENTITY_PLAYER, EntitySlime, EntityGolem, EntityFlying, EntityMob, EntityWaterMob, ENTITY_WOLF, <add> ENTITY_LIVING_BASE, ENTITY_LIVING, Entity_Ageable, EntityAmbientCreature, ENTITY_ITEM, ENTITY_PROJECTILE, <ide> /* <ide> * Hostile mobs <ide> */ <del> ENTITY_ENDERMAN, ENTITY_ZOMBIE_PIGMAN, ENTITY_SPIDER, ENTITY_WITHER_SKELETON <add> ENTITY_ENDERMAN, ENTITY_ZOMBIE_PIGMAN, ENTITY_SPIDER, ENTITY_WITHER_SKELETON, ENTITY_WITHER, ENTITY_DRAGON, ENTITY_PHANTOM, ENTITY_DROWNED, <add> ENTITY_EVOKER, ENTITY_STRAY, ENTITY_ELDER_GUARDIAN, ENTITY_CREEPER, ENTITY_VINDICATOR, ENTITY_ILLUSIONER, ENTITY_ZOMBIE, ENTITY_HUSK, <add> ENTITY_SKELETON, ENTITY_SHULKER, ENTITY_SLIME, ENTITY_GUARDIAN, ENTITY_VEX, ENTITY_SILVERFISH, ENTITY_WITCH, ENTITY_GIANT, ENTITY_BLAZE, <add> ENTITY_ENDERMITE, ENTITY_GHAST, ENTITY_MAGMA_CUBE, ENTITY_CAVE_SPIDER <ide> } <ide> <ide> }
JavaScript
mit
82aa5ac6eab2f35f2019e4f7a0ff0cbb4ff5d78a
0
jmromeo/nhab,jmromeo/nhab,jmromeo/nhab,jmromeo/nhab
/** * @fileOverview Produces line chart * @todo fix zoom when reached min or max zoom * @todo add minimum number and maximum number of display points..used in zoom */ class DataVisualizer { /** * @class DataVisualizer * @classdesc DataVisualizer is a wrapper class for line-chart in chart.js. * * @param {string} id - ID of canvas that chart should be rendered on. * @param {Objects[]} visualizerConfig - The configuration for y-axes data. * @param {string} visualizerConfig[].buttonId - The ID of toggle button corresponding to y-axes data. * @param {Integer[]} visualizerConfig[].data - Array with data to be displayed. Refresh chart must be called when data arrays are updated.When array is updated to display, call refresh chart. * @param {integer} visualizerConfig[].datasetIndex - Index attribute set on buttonId. * @param {integer} visualizerConfig[].color - Color of data points. * @param {string} visualizerConfig[].name - Name of data to be used as label. * @param {string} visualizerConfig[].units - Units used for data (ie meters). * @param {string} visualizerConfig[].fill - Determines whether this data should be represented as area or line chart. * @param {integer} defaultNumDisplayPoints - Number of data points to display on the graph by default. * @param {function} tooltipCallback(index) - Pointer to function to be used when an element is selected on the graph. */ constructor(id, visualizerConfig, defaultNumDisplayPoints) { /** * Number of total packets added to chart. Only recalculated during chart refresh. * * @private * @name DataVisualizer#numPackets * @type Integer * @default 0 */ this.numPackets = 0; /** * Number of data points to display on line chart. * * @private * @name DataVisualizer#numDisplayPoints * @type Integer */ this.numDisplayPoints = defaultNumDisplayPoints; /** * Default number of points to display on zoom reset. 
* * @private * @name DataVisualizer#defaultNumDisplayPoints * @type Integer */ this.defaultNumDisplayPoints = defaultNumDisplayPoints; /** * Minimum number of points to display on graph. * * @private * @name DataVisualizer#minNumDisplayPoints * @type Integer */ this.minNumDisplayPoints = 5; /** * Maximum number of points to display on graph. * * @private * @name DataVisualizer#maxNumDisplayPoints * @type Integer */ this.maxNumDisplayPoints = 100; /** * If data is zoomed or panned, we will stop drawing new incoming datapoints. * * @private * @name DataVisualizer#zoomed * @type Bool * @default false */ this.zoomed = false; /** * If data is zoomed or panned, we will stop drawing new incoming datapoints. * * @private * @name DataVisualizer#panned * @type Bool * @default false */ this.panned = false; /** * The higher the number the slower the pan speed. Note it only changes * the speed of a flick scroll. * * @private * @name DataVisualizer#panSpeed * @type Integer * @default 10 */ this.panSpeed = 10; /** * The higher the number the faster zoom buttons and wheel zoom will occur. * * @private * @name DataVisualizer#zoomSpeed * @type Integer * @default 2 */ this.zoomSpeed = 2; /** * Index of first datapoint to be drawn. * * @private * @name DataVisualizer#startIndex * @type Integer */ this.startIndex = 0; /** * Index of last datapoint to be drawn. * * @private * @name DataVisualizer#startIndex * @type Integer */ this.endIndex = 0; /** * Chart object to be used by chartjs for chart manipulation> * * @private * @name DataVisualizer#chart * @type Object */ this.chart; /** * Data will hold all of the data that has been pushed to the graph, * whereas the charts datasets will only hold data to be displayed. * * @private * @name DataVisualizer#data * @type Array */ this.data = []; /** * Callback when a tooltip is showing with the index of the tooltip. * * @method * @name DataVisualizer#tooltipCallback * * @param {integer} index - Index of data showing in tooltip. 
*/ this.tooltipCallback = tooltipCallback; /** * Reset zoom and pan and calls the tooltip callback with index of * last item. * * @method reset * @name DataVisualizer#reset */ this.reset = function() { this.zoomed = false; this.panned = false; // resetting to the end of the graph this.numDisplayPoints = this.defaultNumDisplayPoints; this.startIndex = this.numPackets - this.numDisplayPoints; // calling tooltip with the number of the last packet this.tooltipCallback(this.numPackets - 1); this.refreshChart(); }.bind(this) /** * Touch and scroll event handler used to pan the graph. * * @method pan * @name DataVisualizer#pan * * @param {event} e - Scroll or touch event. If null then it was from an *. on click action. Use panDirection value below if null. * @param {string} panDirection - "left" if pan left, "right" if pan right */ this.pan = function(e, panDirection) { // used for calculating how far we should pan var deltaX = 0; // "static" variable used to hold previous touch position this.pan.startX; this.panned = true; if (e == null) { if (panDirection == "right") { deltaX += (this.numDisplayPoints / 2) * this.panSpeed; } else { deltaX -= (this.numDisplayPoints / 2) * this.panSpeed; } } // in case of start of touch we shouldn't move at all, just need to grab touch position else if (e.type == "touchstart") { this.pan.startX = e.touches[0].clientX; } // calculate how far to pan based on how far fingers have scrolled since last touchmove else if (e.type == "touchmove") { // only allow 2 finger pan if (e.changedTouches.length > 0) { deltaX = this.pan.startX - e.changedTouches[0].clientX; this.pan.startX = e.changedTouches[0].clientX; } } // for wheel and scroll events else { deltaX = e.deltaX; // to fix issue where no scroll happens at all due to rounding of decimal values < |1| if (e.deltaX < 0) { deltaX = e.deltaX - this.panSpeed; } else { deltaX = e.deltaX + this.panSpeed; } } this.startIndex += (deltaX / this.panSpeed); if (this.startIndex < 0) { this.startIndex = 0; 
} if ((this.startIndex + this.numDisplayPoints) > this.numPackets) { this.startIndex = this.numPackets - this.numDisplayPoints; } // rounding both indexes as they will be used to index into arrays and need to be integers this.startIndex = Math.round(this.startIndex); this.endIndex = Math.round(this.startIndex + this.numDisplayPoints); // redrawing chart this.refreshChart(); // preventing a page scroll if (e != null) { e.preventDefault(); } }.bind(this); /** * Zooms in on graph. Can be attached to the onClick method of a zoom in * button to perform a zoom on the graph for each click. * * @method zoomIn * @name DataVisualizer#zoomIn */ this.zoomIn = function() { this.zoomed = true; this.numDisplayPoints -= this.zoomSpeed; // don't update graph if we're already displaying minimum number of points if (this.numDisplayPoints < this.minNumDisplayPoints) { this.numDisplayPoints = this.minNumDisplayPoints; } else { this.startIndex += this.zoomSpeed / 2; // if zooming would zoom off end of graph only zoom in left if ((this.startIndex + this.numDisplayPoints) < this.numPackets) { this.endIndex = this.startIndex + this.numDisplayPoints - this.zoomSpeed; } else { this.endIndex = this.numPackets; } this.refreshChart(); } }.bind(this) /** * Zooms out of graph. Can be attached to the onClick method of a zoom out * button to perform a zoom on the graph for each click. * * @method zoomOut * @name DataVisualizer#zoomOut */ this.zoomOut = function() { this.zoomed = true; // zooming out by adding more data points to the graph. Only allowing // a certain number of data points or else the graph gets too messy. this.numDisplayPoints += this.zoomSpeed; // don't update graph if we're already displaying the maximum number of points if (this.numDisplayPoints > this.maxNumDisplayPoints) { this.numDisplayPoints = this.maxNumDisplayPoints; } else { // calculating the start and end indexes of data to be drawn. if we // would draw off the end of the graph, then only zoom out left instead. 
this.startIndex -= this.zoomSpeed / 2; if ((this.startIndex + this.numDisplayPoints) < this.numPackets) { this.endIndex = this.startIndex + this.numDisplayPoints - this.zoomSpeed; } if (this.startIndex < 0) { this.startIndex = 0; } this.refreshChart(); } }.bind(this) /** * Event handler for mousewheel event that will allow zoom on mousewheel. * * @private * @method zoom * @name DataVisualizer#zoom * * @param {Object} e - Mousewheel event. * @param {string} e.deltaY - Amount mousewheel has moved in the vertical direction. */ this.zoom = function(e) { console.log("deltaX"); console.log(e.deltaX); console.log("deltaY"); console.log(e.deltaY); if (e.deltaY < -1) { this.zoomIn(); } else if (e.deltaY > 1) { this.zoomOut(); } e.preventDefault(); }.bind(this) // set scroll and touch event listeners on canvas to add pan and zoom capabilities // document.getElementById(id).addEventListener('wheel', this.zoom); document.getElementById(id).addEventListener('wheel', this.pan); document.getElementById(id).addEventListener('scroll', this.pan); document.getElementById(id).addEventListener('touchstart', this.pan); document.getElementById(id).addEventListener('touchmove', this.pan); /** * Displays or hides the data corresponding to the selected button. Requires that initToggleButtons has been called. * * @method toggleData * @name DataVisualizer#toggleData * * @param {Object} button - Button element corresponding to data to be toggled. */ this.toggleData = function(button) { var dataSetIndex = button.getAttribute('data-data-set-index'); var dataSet = this.chart.config.data.datasets[dataSetIndex]; var yAxis = this.chart.config.options.scales.yAxes[dataSetIndex]; button.classList.remove("altitude-color"); // toggling data and axis visibility dataSet.hidden = !dataSet.hidden; yAxis.display = !yAxis.display; this.chart.update(); }.bind(this) /** * Adds the specified data to each dataset. There should be 1 value for * each dataset in the graph. 
* * @method addDataPoint * @name DataVisualizer#addDataPoint * * @param {Objects[]} data - Array with 1 value for each dataset. * If a visible dataset, the value should be an integer. */ this.addDataPoint = function(data) { var i; var datasets = this.chart.config.data.datasets; for (i = 0; i < datasets.length; i++) { // pushing incoming data into overall data array this.data[i].push(data[i]); } this.refreshChart(); }.bind(this) /** * Adds the specified data to each dataset. There should be 1 value for * each dataset in the graph. * * @method addDataPoint * @name DataVisualizer#addDataPoint * * @param {Objects[]} data - Array with 1 value for each dataset. * If a visible dataset, the value should be an integer. */ this.refreshChart = function() { var i, j; var datasets = this.chart.config.data.datasets; this.numPackets = (this.zoomed == true) ? this.numPackets : this.data[0].length; if (!this.zoomed && !this.panned) { this.startIndex = (this.numPackets > this.numDisplayPoints) ? (this.numPackets - this.numDisplayPoints) : 0; this.endIndex = (this.numPackets > this.numDisplayPoints) ? (this.startIndex + this.numDisplayPoints) : this.numPackets; } // pushing data from overall array to display data array for (i = 0; i < datasets.length; i++) { datasets[i].data.length = 0; for (j = this.startIndex; j < this.endIndex; j++) { datasets[i].data.push(this.data[i][j]); } } packetIndex.length = 0; for (i = this.startIndex; i < this.endIndex; i++) { packetIndex.push(i); } this.chart.update(); }.bind(this) /** * Initializes the graph using the parameters passed in through the visualizerConfig objects. * Also initializes the data array. * * @method initGraph * @name DataVisualizer#initGraph * * @private * @param {Objects[]} visualizerConfig - The configuration for y-axes data. * @param {string} visualizerConfig[].buttonId - The ID of toggle button corresponding to y-axes data. * @param {integer} visualizerConfig[].datasetIndex - Index attribute set on buttonId. 
* @param {integer} visualizerConfig[].color - Color of data points. * @param {string} visualizerConfig[].name - Name of data to be used as label. * @param {string} visualizerConfig[].units - Units used for data (ie meters). * @param {string} visualizerConfig[].fill - Determines whether this data should be represented as area or line chart. */ this.initGraph = function(visualizerConfig) { for (var i = 0; i < visualizerConfig.length; i++) { this.data.push(visualizerConfig[i].data); // Configuring the data set options var dataConfig = { label: visualizerConfig[i].name, borderColor: visualizerConfig[i].color, data: [], backgroundColor: Samples.utils.transparentize(visualizerConfig[i].color, 0.9), fill: visualizerConfig[i].fill, yAxisID: visualizerConfig[i].buttonId }; // Configuring the axes options var axesConfig = { id: visualizerConfig[i].buttonId, display: true, position: (i % 2) ? "left" : "right", scaleLabel: { display: false, labelString: visualizerConfig[i].units }, ticks: { callback: (function(units) { return function(label, index, labels) { if (index == 0) { return units; } return Math.round(label); } })(visualizerConfig[i].units) } }; // push configuration to config array dataSetsConfig.push(dataConfig); yAxesConfig.push(axesConfig); } // setting up config variable for chart config.options.scales.yaxes = yAxesConfig; config.data.datasets = dataSetsConfig; // creating callback for tooltip config.options.tooltips.callbacks.beforeTitle = function(tooltipItem, data) { this.tooltipCallback(parseInt(tooltipItem[0].xLabel)); }.bind(this) }.bind(this) /** * Initializes toggle buttons to reference correct dataset. * * @method initToggleButtons * @name DataVisualizer#initToggleButtons * * @private * @param {Objects[]} visualizerConfig - The configuration for y-axes data. * @param {string} visualizerConfig[].buttonId - The ID of toggle button corresponding to y-axes data. * @param {integer} visualizerConfig[].datasetIndex - Index attribute set on buttonId. 
*/ function initToggleButtons(visualizerConfig) { var button; for (var i = 0; i < visualizerConfig.length; i++) { button = document.getElementById(visualizerConfig[i].buttonId); button.setAttribute('data-data-set-index', visualizerConfig[i].datasetIndex); } } // initialize graph configuration this.initGraph(visualizerConfig); // initializing toggle buttons initToggleButtons(visualizerConfig); // creating and configuring chart var ctx = document.getElementById(id).getContext("2d"); this.chart = new Chart(ctx, config); Chart.defaults.global.defaultFontColor = "#ebebeb"; Chart.defaults.global.defaultFontFamily = "'Lato','Helvetica Neue','Helvetica','Arial',sans-serif"; } }
tracker/datavisualizer/datavisualizer.js
/** * @fileOverview Produces line chart * @todo fix zoom when reached min or max zoom * @todo add minimum number and maximum number of display points..used in zoom */ class DataVisualizer { /** * @class DataVisualizer * @classdesc DataVisualizer is a wrapper class for line-chart in chart.js. * * @param {string} id - ID of canvas that chart should be rendered on. * @param {Objects[]} visualizerConfig - The configuration for y-axes data. * @param {string} visualizerConfig[].buttonId - The ID of toggle button corresponding to y-axes data. * @param {Integer[]} visualizerConfig[].data - Array with data to be displayed. Refresh chart must be called when data arrays are updated.When array is updated to display, call refresh chart. * @param {integer} visualizerConfig[].datasetIndex - Index attribute set on buttonId. * @param {integer} visualizerConfig[].color - Color of data points. * @param {string} visualizerConfig[].name - Name of data to be used as label. * @param {string} visualizerConfig[].units - Units used for data (ie meters). * @param {string} visualizerConfig[].fill - Determines whether this data should be represented as area or line chart. * @param {integer} defaultNumDisplayPoints - Number of data points to display on the graph by default. * @param {function} tooltipCallback(index) - Pointer to function to be used when an element is selected on the graph. */ constructor(id, visualizerConfig, defaultNumDisplayPoints) { /** * Number of total packets added to chart. Only recalculated during chart refresh. * * @private * @name DataVisualizer#numPackets * @type Integer * @default 0 */ this.numPackets = 0; /** * Number of data points to display on line chart. * * @private * @name DataVisualizer#numDisplayPoints * @type Integer */ this.numDisplayPoints = defaultNumDisplayPoints; /** * Default number of points to display on zoom reset. 
* * @private * @name DataVisualizer#defaultNumDisplayPoints * @type Integer */ this.defaultNumDisplayPoints = defaultNumDisplayPoints; /** * Minimum number of points to display on graph. * * @private * @name DataVisualizer#minNumDisplayPoints * @type Integer */ this.minNumDisplayPoints = 5; /** * Maximum number of points to display on graph. * * @private * @name DataVisualizer#maxNumDisplayPoints * @type Integer */ this.maxNumDisplayPoints = 100; /** * If data is zoomed or panned, we will stop drawing new incoming datapoints. * * @private * @name DataVisualizer#zoomed * @type Bool * @default false */ this.zoomed = false; /** * If data is zoomed or panned, we will stop drawing new incoming datapoints. * * @private * @name DataVisualizer#panned * @type Bool * @default false */ this.panned = false; /** * The higher the number the slower the pan speed. Note it only changes * the speed of a flick scroll. * * @private * @name DataVisualizer#panSpeed * @type Integer * @default 10 */ this.panSpeed = 10; /** * The higher the number the faster zoom buttons and wheel zoom will occur. * * @private * @name DataVisualizer#zoomSpeed * @type Integer * @default 2 */ this.zoomSpeed = 2; /** * Index of first datapoint to be drawn. * * @private * @name DataVisualizer#startIndex * @type Integer */ this.startIndex = 0; /** * Index of last datapoint to be drawn. * * @private * @name DataVisualizer#startIndex * @type Integer */ this.endIndex = 0; /** * Chart object to be used by chartjs for chart manipulation> * * @private * @name DataVisualizer#chart * @type Object */ this.chart; /** * Data will hold all of the data that has been pushed to the graph, * whereas the charts datasets will only hold data to be displayed. * * @private * @name DataVisualizer#data * @type Array */ this.data = []; /** * Callback when a tooltip is showing with the index of the tooltip. * * @method * @name DataVisualizer#tooltipCallback * * @param {integer} index - Index of data showing in tooltip. 
*/ this.tooltipCallback = tooltipCallback; /** * Reset zoom and pan and calls the tooltip callback with index of * last item. * * @method reset * @name DataVisualizer#reset */ this.reset = function() { this.zoomed = false; this.panned = false; // resetting to the end of the graph this.numDisplayPoints = this.defaultNumDisplayPoints; this.startIndex = this.numPackets - this.numDisplayPoints; // calling tooltip with the number of the last packet this.tooltipCallback(this.numPackets - 1); this.refreshChart(); }.bind(this) /** * Touch and scroll event handler used to pan the graph. * * @method pan * @name DataVisualizer#pan * * @param {event} e - Scroll or touch event. If null then it was from an *. on click action. Use panDirection value below if null. * @param {string} panDirection - "left" if pan left, "right" if pan right */ this.pan = function(e, panDirection) { // used for calculating how far we should pan var deltaX = 0; // "static" variable used to hold previous touch position this.pan.startX; this.panned = true; if (e == null) { if (panDirection == "right") { deltaX += (this.numDisplayPoints / 2) * this.panSpeed; } else { deltaX -= (this.numDisplayPoints / 2) * this.panSpeed; } } // in case of start of touch we shouldn't move at all, just need to grab touch position else if (e.type == "touchstart") { this.pan.startX = e.touches[0].clientX; } // calculate how far to pan based on how far fingers have scrolled since last touchmove else if (e.type == "touchmove") { // only allow 2 finger pan if (e.changedTouches.length > 0) { deltaX = this.pan.startX - e.changedTouches[0].clientX; this.pan.startX = e.changedTouches[0].clientX; } } // for wheel and scroll events else { // to fix issue where no scroll happens at all due to rounding of decimal values < |1| if (e.deltaX < 0) { deltaX = e.deltaX - this.panSpeed; } else { deltaX = e.deltaX + this.panSpeed; } } this.startIndex += (deltaX / this.panSpeed); if (this.startIndex < 0) { this.startIndex = 0; } if 
((this.startIndex + this.numDisplayPoints) > this.numPackets) { this.startIndex = this.numPackets - this.numDisplayPoints; } // rounding both indexes as they will be used to index into arrays and need to be integers this.startIndex = Math.round(this.startIndex); this.endIndex = Math.round(this.startIndex + this.numDisplayPoints); // redrawing chart this.refreshChart(); // preventing a page scroll if (e != null) { e.preventDefault(); } }.bind(this); /** * Zooms in on graph. Can be attached to the onClick method of a zoom in * button to perform a zoom on the graph for each click. * * @method zoomIn * @name DataVisualizer#zoomIn */ this.zoomIn = function() { this.zoomed = true; this.numDisplayPoints -= this.zoomSpeed; // don't update graph if we're already displaying minimum number of points if (this.numDisplayPoints < this.minNumDisplayPoints) { this.numDisplayPoints = this.minNumDisplayPoints; } else { this.startIndex += this.zoomSpeed / 2; // if zooming would zoom off end of graph only zoom in left if ((this.startIndex + this.numDisplayPoints) < this.numPackets) { this.endIndex = this.startIndex + this.numDisplayPoints - this.zoomSpeed; } else { this.endIndex = this.numPackets; } this.refreshChart(); } }.bind(this) /** * Zooms out of graph. Can be attached to the onClick method of a zoom out * button to perform a zoom on the graph for each click. * * @method zoomOut * @name DataVisualizer#zoomOut */ this.zoomOut = function() { this.zoomed = true; // zooming out by adding more data points to the graph. Only allowing // a certain number of data points or else the graph gets too messy. this.numDisplayPoints += this.zoomSpeed; // don't update graph if we're already displaying the maximum number of points if (this.numDisplayPoints > this.maxNumDisplayPoints) { this.numDisplayPoints = this.maxNumDisplayPoints; } else { // calculating the start and end indexes of data to be drawn. if we // would draw off the end of the graph, then only zoom out left instead. 
this.startIndex -= this.zoomSpeed / 2; if ((this.startIndex + this.numDisplayPoints) < this.numPackets) { this.endIndex = this.startIndex + this.numDisplayPoints - this.zoomSpeed; } if (this.startIndex < 0) { this.startIndex = 0; } this.refreshChart(); } }.bind(this) /** * Event handler for mousewheel event that will allow zoom on mousewheel. * * @private * @method zoom * @name DataVisualizer#zoom * * @param {Object} e - Mousewheel event. * @param {string} e.deltaY - Amount mousewheel has moved in the vertical direction. */ this.zoom = function(e) { if (e.deltaY < 0) { this.zoomIn(); } else { this.zoomOut(); } e.preventDefault(); }.bind(this) // set scroll and touch event listeners on canvas to add pan and zoom capabilities document.getElementById(id).addEventListener('wheel', this.zoom); document.getElementById(id).addEventListener('scroll', this.pan); document.getElementById(id).addEventListener('touchstart', this.pan); document.getElementById(id).addEventListener('touchmove', this.pan); /** * Displays or hides the data corresponding to the selected button. Requires that initToggleButtons has been called. * * @method toggleData * @name DataVisualizer#toggleData * * @param {Object} button - Button element corresponding to data to be toggled. */ this.toggleData = function(button) { var dataSetIndex = button.getAttribute('data-data-set-index'); var dataSet = this.chart.config.data.datasets[dataSetIndex]; var yAxis = this.chart.config.options.scales.yAxes[dataSetIndex]; button.classList.remove("altitude-color"); // toggling data and axis visibility dataSet.hidden = !dataSet.hidden; yAxis.display = !yAxis.display; this.chart.update(); }.bind(this) /** * Adds the specified data to each dataset. There should be 1 value for * each dataset in the graph. * * @method addDataPoint * @name DataVisualizer#addDataPoint * * @param {Objects[]} data - Array with 1 value for each dataset. * If a visible dataset, the value should be an integer. 
*/ this.addDataPoint = function(data) { var i; var datasets = this.chart.config.data.datasets; for (i = 0; i < datasets.length; i++) { // pushing incoming data into overall data array this.data[i].push(data[i]); } this.refreshChart(); }.bind(this) /** * Adds the specified data to each dataset. There should be 1 value for * each dataset in the graph. * * @method addDataPoint * @name DataVisualizer#addDataPoint * * @param {Objects[]} data - Array with 1 value for each dataset. * If a visible dataset, the value should be an integer. */ this.refreshChart = function() { var i, j; var datasets = this.chart.config.data.datasets; this.numPackets = (this.zoomed == true) ? this.numPackets : this.data[0].length; if (!this.zoomed && !this.panned) { this.startIndex = (this.numPackets > this.numDisplayPoints) ? (this.numPackets - this.numDisplayPoints) : 0; this.endIndex = (this.numPackets > this.numDisplayPoints) ? (this.startIndex + this.numDisplayPoints) : this.numPackets; } // pushing data from overall array to display data array for (i = 0; i < datasets.length; i++) { datasets[i].data.length = 0; for (j = this.startIndex; j < this.endIndex; j++) { datasets[i].data.push(this.data[i][j]); } } packetIndex.length = 0; for (i = this.startIndex; i < this.endIndex; i++) { packetIndex.push(i); } this.chart.update(); }.bind(this) /** * Initializes the graph using the parameters passed in through the visualizerConfig objects. * Also initializes the data array. * * @method initGraph * @name DataVisualizer#initGraph * * @private * @param {Objects[]} visualizerConfig - The configuration for y-axes data. * @param {string} visualizerConfig[].buttonId - The ID of toggle button corresponding to y-axes data. * @param {integer} visualizerConfig[].datasetIndex - Index attribute set on buttonId. * @param {integer} visualizerConfig[].color - Color of data points. * @param {string} visualizerConfig[].name - Name of data to be used as label. 
* @param {string} visualizerConfig[].units - Units used for data (ie meters). * @param {string} visualizerConfig[].fill - Determines whether this data should be represented as area or line chart. */ this.initGraph = function(visualizerConfig) { for (var i = 0; i < visualizerConfig.length; i++) { this.data.push(visualizerConfig[i].data); // Configuring the data set options var dataConfig = { label: visualizerConfig[i].name, borderColor: visualizerConfig[i].color, data: [], backgroundColor: Samples.utils.transparentize(visualizerConfig[i].color, 0.9), fill: visualizerConfig[i].fill, yAxisID: visualizerConfig[i].buttonId }; // Configuring the axes options var axesConfig = { id: visualizerConfig[i].buttonId, display: true, position: (i % 2) ? "left" : "right", scaleLabel: { display: false, labelString: visualizerConfig[i].units }, ticks: { callback: (function(units) { return function(label, index, labels) { if (index == 0) { return units; } return Math.round(label); } })(visualizerConfig[i].units) } }; // push configuration to config array dataSetsConfig.push(dataConfig); yAxesConfig.push(axesConfig); } // setting up config variable for chart config.options.scales.yaxes = yAxesConfig; config.data.datasets = dataSetsConfig; // creating callback for tooltip config.options.tooltips.callbacks.beforeTitle = function(tooltipItem, data) { this.tooltipCallback(parseInt(tooltipItem[0].xLabel)); }.bind(this) }.bind(this) /** * Initializes toggle buttons to reference correct dataset. * * @method initToggleButtons * @name DataVisualizer#initToggleButtons * * @private * @param {Objects[]} visualizerConfig - The configuration for y-axes data. * @param {string} visualizerConfig[].buttonId - The ID of toggle button corresponding to y-axes data. * @param {integer} visualizerConfig[].datasetIndex - Index attribute set on buttonId. 
*/ function initToggleButtons(visualizerConfig) { var button; for (var i = 0; i < visualizerConfig.length; i++) { button = document.getElementById(visualizerConfig[i].buttonId); button.setAttribute('data-data-set-index', visualizerConfig[i].datasetIndex); } } // initialize graph configuration this.initGraph(visualizerConfig); // initializing toggle buttons initToggleButtons(visualizerConfig); // creating and configuring chart var ctx = document.getElementById(id).getContext("2d"); this.chart = new Chart(ctx, config); Chart.defaults.global.defaultFontColor = "#ebebeb"; Chart.defaults.global.defaultFontFamily = "'Lato','Helvetica Neue','Helvetica','Arial',sans-serif"; } }
Removed wheel zoom; it doesn't work well with pan
tracker/datavisualizer/datavisualizer.js
Removed wheel zoom; it doesn't work well with pan
<ide><path>racker/datavisualizer/datavisualizer.js <ide> // for wheel and scroll events <ide> else <ide> { <add> deltaX = e.deltaX; <ide> // to fix issue where no scroll happens at all due to rounding of decimal values < |1| <ide> if (e.deltaX < 0) <ide> { <ide> * @param {string} e.deltaY - Amount mousewheel has moved in the vertical direction. <ide> */ <ide> this.zoom = function(e) { <del> if (e.deltaY < 0) { <add> console.log("deltaX"); <add> console.log(e.deltaX); <add> console.log("deltaY"); <add> console.log(e.deltaY); <add> <add> if (e.deltaY < -1) { <ide> this.zoomIn(); <ide> } <del> else { <add> else if (e.deltaY > 1) { <ide> this.zoomOut(); <ide> } <ide> <ide> <ide> <ide> // set scroll and touch event listeners on canvas to add pan and zoom capabilities <del> document.getElementById(id).addEventListener('wheel', this.zoom); <add>// document.getElementById(id).addEventListener('wheel', this.zoom); <add> document.getElementById(id).addEventListener('wheel', this.pan); <ide> document.getElementById(id).addEventListener('scroll', this.pan); <ide> document.getElementById(id).addEventListener('touchstart', this.pan); <ide> document.getElementById(id).addEventListener('touchmove', this.pan);