| code (stringlengths, 3–1.05M) | repo_name (stringlengths, 4–116) | path (stringlengths, 3–942) | language (stringclasses, 30 values) | license (stringclasses, 15 values) | size (int32, 3–1.05M) | line_mean (float64, 0.5–100) | line_max (int64, 1–1k) | alpha_frac (float64, 0.25–1) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|
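Read as a table, each row below pairs one source file (the `code` column, printed in full) with its metadata columns. Here is a minimal sketch of how such a corpus might be filtered using those columns; it assumes a Hugging Face-style `datasets` layout, and the dataset identifier `example/code-corpus` is a placeholder, not the real name.

```python
from datasets import load_dataset

# Stream the corpus so the large `code` column is not materialized all at once.
ds = load_dataset("example/code-corpus", split="train", streaming=True)

# Keep small, human-written, permissively licensed C# files, using only the
# columns declared in the schema above (language, license, size, line_max,
# autogenerated).
for row in ds:
    if (
        row["language"] == "C#"
        and row["license"] in {"apache-2.0", "mit", "unlicense"}
        and row["size"] < 100_000
        and row["line_max"] <= 200
        and not row["autogenerated"]
    ):
        print(row["repo_name"], row["path"], row["size"])
        break
```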
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
public class lb_BirdController : MonoBehaviour {
public int idealNumberOfBirds;
public int maximumNumberOfBirds;
public Camera currentCamera;
public float unspawnDistance = 10.0f;
public bool highQuality = true;
public bool collideWithObjects = true;
public LayerMask groundLayer;
public float birdScale = 1.0f;
public bool robin = true;
public bool blueJay = true;
public bool cardinal = true;
public bool chickadee = true;
public bool sparrow = true;
public bool goldFinch = true;
public bool crow = true;
bool pause = false;
GameObject[] myBirds;
List<string> myBirdTypes = new List<string>();
List<GameObject> birdGroundTargets = new List<GameObject>();
List<GameObject> birdPerchTargets = new List<GameObject>();
int activeBirds = 0;
int birdIndex = 0;
GameObject[] featherEmitters = new GameObject[3];
public void AllFlee(){
if(!pause){
for(int i=0;i<myBirds.Length;i++){
if(myBirds[i].activeSelf){
myBirds[i].SendMessage ("Flee");
}
}
}
}
public void Pause(){
if(pause){
AllUnPause ();
}else{
AllPause ();
}
}
public void AllPause(){
pause = true;
for(int i=0;i<myBirds.Length;i++){
if(myBirds[i].activeSelf){
myBirds[i].SendMessage ("PauseBird");
}
}
}
public void AllUnPause(){
pause = false;
for(int i=0;i<myBirds.Length;i++){
if(myBirds[i].activeSelf){
myBirds[i].SendMessage ("UnPauseBird");
}
}
}
public void SpawnAmount(int amt){
for(int i=0;i<amt;i++){
SpawnBird ();
}
}
public void ChangeCamera(Camera cam){
currentCamera = cam;
}
void Start () {
//find the camera
if (currentCamera == null){
currentCamera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent<Camera>();
}
if(idealNumberOfBirds >= maximumNumberOfBirds){
idealNumberOfBirds = maximumNumberOfBirds-1;
}
//set up the bird types to use
if(robin){
myBirdTypes.Add ("lb_robin");
}
if (blueJay){
myBirdTypes.Add ("lb_blueJay");
}
if(cardinal){
myBirdTypes.Add ("lb_cardinal");
}
if(chickadee){
myBirdTypes.Add ("lb_chickadee");
}
if(sparrow){
myBirdTypes.Add ("lb_sparrow");
}
if(goldFinch){
myBirdTypes.Add ("lb_goldFinch");
}
if(crow){
myBirdTypes.Add ("lb_crow");
}
//Instantiate birds based on amounts and bird types
myBirds = new GameObject[maximumNumberOfBirds];
GameObject bird;
for(int i=0;i<myBirds.Length;i++){
if(highQuality){
bird = Resources.Load (myBirdTypes[Random.Range (0,myBirdTypes.Count)]+"HQ",typeof(GameObject)) as GameObject;
}else{
bird = Resources.Load (myBirdTypes[Random.Range (0,myBirdTypes.Count)],typeof(GameObject)) as GameObject;
}
myBirds[i] = Instantiate (bird,Vector3.zero,Quaternion.identity) as GameObject;
myBirds[i].transform.localScale = myBirds[i].transform.localScale*birdScale;
myBirds[i].transform.parent = transform;
myBirds[i].SendMessage ("SetController",this);
myBirds[i].SetActive (false);
}
//find all the targets
GameObject[] groundTargets = GameObject.FindGameObjectsWithTag("lb_groundTarget");
GameObject[] perchTargets = GameObject.FindGameObjectsWithTag("lb_perchTarget");
for (int i=0;i<groundTargets.Length;i++){
if(Vector3.Distance (groundTargets[i].transform.position,currentCamera.transform.position)<unspawnDistance){
birdGroundTargets.Add(groundTargets[i]);
}
}
for (int i=0;i<perchTargets.Length;i++){
if(Vector3.Distance (perchTargets[i].transform.position,currentCamera.transform.position)<unspawnDistance){
birdPerchTargets.Add(perchTargets[i]);
}
}
//instantiate 3 feather emitters for killing the birds
GameObject fEmitter = Resources.Load ("featherEmitter",typeof(GameObject)) as GameObject;
for(int i=0;i<3;i++){
featherEmitters[i] = Instantiate (fEmitter,Vector3.zero,Quaternion.identity) as GameObject;
featherEmitters[i].transform.parent = transform;
featherEmitters[i].SetActive (false);
}
}
void OnEnable(){
InvokeRepeating("UpdateBirds",1,1);
StartCoroutine("UpdateTargets");
}
Vector3 FindPointInGroundTarget(GameObject target){
//find a random point within the collider of a ground target that touches the ground
Vector3 point;
point.x = Random.Range (target.GetComponent<Collider>().bounds.max.x,target.GetComponent<Collider>().bounds.min.x);
point.y = target.GetComponent<Collider>().bounds.max.y;
point.z = Random.Range (target.GetComponent<Collider>().bounds.max.z,target.GetComponent<Collider>().bounds.min.z);
//raycast down until it hits the ground
RaycastHit hit;
if (Physics.Raycast (point,-Vector3.up,out hit,target.GetComponent<Collider>().bounds.size.y,groundLayer)){
return hit.point;
}
return point;
}
void UpdateBirds(){
//this function is called once a second
if(activeBirds < idealNumberOfBirds && AreThereActiveTargets()){
//if fewer than the ideal number of birds are active, spawn a bird
SpawnBird();
}else if(activeBirds < maximumNumberOfBirds && Random.value < .05 && AreThereActiveTargets()){
//if fewer than the maximum number of birds are active, spawn a bird on average once every 20 seconds (5% chance on each one-second update)
SpawnBird();
}
//check one bird every second to see if it should be unspawned
if(myBirds[birdIndex].activeSelf && BirdOffCamera (myBirds[birdIndex].transform.position) && Vector3.Distance(myBirds[birdIndex].transform.position,currentCamera.transform.position) > unspawnDistance){
//if the bird is off camera and at least unspawnDistance units away, let's unspawn it
Unspawn(myBirds[birdIndex]);
}
birdIndex = birdIndex == myBirds.Length-1 ? 0:birdIndex+1;
}
//this function will cycle through targets removing those outside of the unspawnDistance
//it will also add any new targets that come into range
IEnumerator UpdateTargets(){
List<GameObject> gtRemove = new List<GameObject>();
List<GameObject> ptRemove = new List<GameObject>();
while(true){
gtRemove.Clear();
ptRemove.Clear();
//check targets to see if they are out of range
for(int i=0;i<birdGroundTargets.Count;i++){
if (Vector3.Distance (birdGroundTargets[i].transform.position,currentCamera.transform.position)>unspawnDistance){
gtRemove.Add (birdGroundTargets[i]);
}
yield return 0;
}
for (int i=0;i<birdPerchTargets.Count;i++){
if (Vector3.Distance (birdPerchTargets[i].transform.position,currentCamera.transform.position)>unspawnDistance){
ptRemove.Add (birdPerchTargets[i]);
}
yield return 0;
}
//remove any targets that have been found out of range
foreach (GameObject entry in gtRemove){
birdGroundTargets.Remove(entry);
}
foreach (GameObject entry in ptRemove){
birdPerchTargets.Remove(entry);
}
yield return 0;
//now check for any new Targets
Collider[] hits = Physics.OverlapSphere(currentCamera.transform.position,unspawnDistance);
foreach(Collider hit in hits){
if (hit.tag == "lb_groundTarget" && !birdGroundTargets.Contains (hit.gameObject)){
birdGroundTargets.Add (hit.gameObject);
}
if (hit.tag == "lb_perchTarget" && !birdPerchTargets.Contains (hit.gameObject)){
birdPerchTargets.Add (hit.gameObject);
}
}
yield return 0;
}
}
bool BirdOffCamera(Vector3 birdPos){
Vector3 screenPos = currentCamera.WorldToViewportPoint(birdPos);
if (screenPos.x < 0 || screenPos.x > 1 || screenPos.y < 0 || screenPos.y > 1){
return true;
}else{
return false;
}
}
void Unspawn(GameObject bird){
bird.transform.position = Vector3.zero;
bird.SetActive (false);
activeBirds --;
}
void SpawnBird(){
if (!pause){
GameObject bird = null;
int randomBirdIndex = Mathf.FloorToInt (Random.Range (0,myBirds.Length));
int loopCheck = 0;
//find a random bird that is not active
while(bird == null){
if(myBirds[randomBirdIndex].activeSelf == false){
bird = myBirds[randomBirdIndex];
}
randomBirdIndex = randomBirdIndex+1 >= myBirds.Length ? 0:randomBirdIndex+1;
loopCheck ++;
if (loopCheck >= myBirds.Length){
//all birds are active
return;
}
}
//Find a point off camera to position the bird and activate it
bird.transform.position = FindPositionOffCamera();
if(bird.transform.position == Vector3.zero){
//couldn't find a suitable spawn point
return;
}else{
bird.SetActive (true);
activeBirds++;
BirdFindTarget(bird);
}
}
}
bool AreThereActiveTargets(){
if (birdGroundTargets.Count > 0 || birdPerchTargets.Count > 0){
return true;
}else{
return false;
}
}
Vector3 FindPositionOffCamera(){
RaycastHit hit;
float dist = Random.Range (2,10);
Vector3 ray = -currentCamera.transform.forward;
int loopCheck = 0;
//find a random ray pointing away from the camera's field of view
ray += new Vector3(Random.Range (-.5f,.5f),Random.Range (-.5f,.5f),Random.Range (-.5f,.5f));
//cycle through random rays until we find one that doesn't hit anything
while(Physics.Raycast(currentCamera.transform.position,ray,out hit,dist)){
dist = Random.Range (2,10);
loopCheck++;
if (loopCheck > 35){
//can't find any good spawn points, so let's cancel
return Vector3.zero;
}
}
return currentCamera.transform.position+(ray*dist);
}
void BirdFindTarget(GameObject bird){
//yield return new WaitForSeconds(1);
GameObject target;
if (birdGroundTargets.Count > 0 || birdPerchTargets.Count > 0){
//pick a random target based on the number of available targets vs the area of ground targets
//each perch target counts for .3 area, each ground target's area is calculated
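//illustrative example (numbers assumed, not taken from any scene): with 2 perch targets
//(2 * 0.3 = 0.6 area) and a single ground target whose collider bounds are 4 x 3 units
//(12 area), a ground target is chosen with probability 12 / (12 + 0.6), roughly 0.95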
float gtArea=0.0f;
float ptArea=birdPerchTargets.Count*0.3f;
for (int i=0;i<birdGroundTargets.Count;i++){
gtArea += birdGroundTargets[i].GetComponent<Collider>().bounds.size.x*birdGroundTargets[i].GetComponent<Collider>().bounds.size.z;
}
if (ptArea == 0.0f || Random.value < gtArea/(gtArea+ptArea)){
target = birdGroundTargets[Mathf.FloorToInt (Random.Range (0,birdGroundTargets.Count))];
bird.SendMessage ("FlyToTarget",FindPointInGroundTarget(target));
}else{
target = birdPerchTargets[Mathf.FloorToInt (Random.Range (0,birdPerchTargets.Count))];
bird.SendMessage ("FlyToTarget",target.transform.position);
}
}else{
bird.SendMessage ("FlyToTarget",currentCamera.transform.position+new Vector3(Random.Range (-100,100),Random.Range (5,10),Random.Range(-100,100)));
}
}
void FeatherEmit(Vector3 pos){
foreach (GameObject fEmit in featherEmitters){
if(!fEmit.activeSelf){
fEmit.transform.position = pos;
fEmit.SetActive (true);
StartCoroutine("DeactivateFeathers",fEmit);
break;
}
}
}
IEnumerator DeactivateFeathers(GameObject featherEmit){
yield return new WaitForSeconds(4.5f);
featherEmit.SetActive (false);
}
}
| pierredepaz/alternate-realities | projects/tavius/02_EmergingEmotion/Assets/living birds/scripts/lb_BirdController.cs | C# | unlicense | 10,800 | 30.48105 | 203 | 0.708001 | false |
//===--- HeapObject.cpp - Swift Language ABI Allocation Support -----------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See http://swift.org/LICENSE.txt for license information
// See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// Allocation ABI Shims While the Language is Bootstrapped
//
//===----------------------------------------------------------------------===//
#include "swift/Basic/Lazy.h"
#include "swift/Runtime/HeapObject.h"
#include "swift/Runtime/Heap.h"
#include "swift/Runtime/Metadata.h"
#include "swift/ABI/System.h"
#include "llvm/Support/MathExtras.h"
#include "MetadataCache.h"
#include "Private.h"
#include "swift/Runtime/Debug.h"
#include <algorithm>
#include <cassert>
#include <cstring>
#include <cstdio>
#include <cstdlib>
#include <unistd.h>
#include "../SwiftShims/RuntimeShims.h"
#if SWIFT_OBJC_INTEROP
# include <objc/NSObject.h>
# include <objc/runtime.h>
# include <objc/message.h>
# include <objc/objc.h>
#include "swift/Runtime/ObjCBridge.h"
#endif
#include "Leaks.h"
using namespace swift;
SWIFT_RT_ENTRY_VISIBILITY
extern "C"
HeapObject *
swift::swift_allocObject(HeapMetadata const *metadata,
size_t requiredSize,
size_t requiredAlignmentMask)
SWIFT_CC(RegisterPreservingCC_IMPL) {
return SWIFT_RT_ENTRY_REF(swift_allocObject)(metadata, requiredSize,
requiredAlignmentMask);
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
HeapObject *
SWIFT_RT_ENTRY_IMPL(swift_allocObject)(HeapMetadata const *metadata,
size_t requiredSize,
size_t requiredAlignmentMask)
SWIFT_CC(RegisterPreservingCC_IMPL) {
assert(isAlignmentMask(requiredAlignmentMask));
auto object = reinterpret_cast<HeapObject *>(
SWIFT_RT_ENTRY_CALL(swift_slowAlloc)(requiredSize,
requiredAlignmentMask));
// FIXME: this should be a placement new but that adds a null check
object->metadata = metadata;
object->refCount.init();
object->weakRefCount.init();
// If leak tracking is enabled, start tracking this object.
SWIFT_LEAKS_START_TRACKING_OBJECT(object);
return object;
}
HeapObject *
swift::swift_initStackObject(HeapMetadata const *metadata,
HeapObject *object) {
object->metadata = metadata;
object->refCount.init();
object->weakRefCount.initForNotDeallocating();
return object;
}
void
swift::swift_verifyEndOfLifetime(HeapObject *object) {
if (object->refCount.getCount() != 0)
swift::fatalError(/* flags = */ 0,
"fatal error: stack object escaped\n");
if (object->weakRefCount.getCount() != 1)
swift::fatalError(/* flags = */ 0,
"fatal error: weak/unowned reference to stack object\n");
}
/// \brief Allocate a reference-counted object on the heap that
/// occupies <size> bytes of maximally-aligned storage. The object is
/// uninitialized except for its header.
SWIFT_RUNTIME_EXPORT
extern "C" HeapObject* swift_bufferAllocate(
HeapMetadata const* bufferType, size_t size, size_t alignMask)
{
return swift::SWIFT_RT_ENTRY_CALL(swift_allocObject)(bufferType, size,
alignMask);
}
/// \brief Another entrypoint for swift_bufferAllocate.
/// It is generated by the compiler in some corner cases, e.g. if a serialized
/// optimized module is imported into a non-optimized main module.
/// TODO: This is only a workaround. Remove this function as soon as we can
/// get rid of the llvm SwiftStackPromotion pass.
SWIFT_RUNTIME_EXPORT
extern "C" HeapObject* swift_bufferAllocateOnStack(
HeapMetadata const* bufferType, size_t size, size_t alignMask) {
return swift::SWIFT_RT_ENTRY_CALL(swift_allocObject)(bufferType, size,
alignMask);
}
/// \brief Called at the end of the lifetime of an object returned by
/// swift_bufferAllocateOnStack.
/// It is generated by the compiler in some corner cases, e.g. if a serialized
/// optimized module is imported into a non-optimized main module.
/// TODO: This is only a workaround. Remove this function as soon as we can
/// get rid of the llvm SwiftStackPromotion pass.
SWIFT_RUNTIME_EXPORT
extern "C" void swift_bufferDeallocateFromStack(HeapObject *) {
}
SWIFT_RUNTIME_EXPORT
extern "C" intptr_t swift_bufferHeaderSize() { return sizeof(HeapObject); }
namespace {
/// Heap metadata for a box, which may have been generated statically by the
/// compiler or by the runtime.
struct BoxHeapMetadata : public HeapMetadata {
/// The offset from the beginning of a box to its value.
unsigned Offset;
constexpr BoxHeapMetadata(MetadataKind kind,
unsigned offset)
: HeapMetadata{kind}, Offset(offset)
{}
};
/// Heap metadata for runtime-instantiated generic boxes.
struct GenericBoxHeapMetadata : public BoxHeapMetadata {
/// The type inside the box.
const Metadata *BoxedType;
constexpr GenericBoxHeapMetadata(MetadataKind kind,
unsigned offset,
const Metadata *boxedType)
: BoxHeapMetadata{kind, offset},
BoxedType(boxedType)
{}
static unsigned getHeaderOffset(const Metadata *boxedType) {
// Round up the header size to alignment.
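// Illustrative example (assuming a 64-bit target where sizeof(HeapObject) == 16):
// an 8-byte aligned boxed type (alignMask == 7) yields (16 + 7) & ~7 == 16, while a
// 32-byte aligned type (alignMask == 31) yields (16 + 31) & ~31 == 32.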
unsigned alignMask = boxedType->getValueWitnesses()->getAlignmentMask();
return (sizeof(HeapObject) + alignMask) & ~alignMask;
}
/// Project the value out of a box of this type.
OpaqueValue *project(HeapObject *box) const {
auto bytes = reinterpret_cast<char*>(box);
return reinterpret_cast<OpaqueValue *>(bytes + Offset);
}
/// Get the allocation size of this box.
unsigned getAllocSize() const {
return Offset + BoxedType->getValueWitnesses()->getSize();
}
/// Get the allocation alignment of this box.
unsigned getAllocAlignMask() const {
// Heap allocations are at least pointer aligned.
return BoxedType->getValueWitnesses()->getAlignmentMask()
| (alignof(void*) - 1);
}
};
/// Heap object destructor for a generic box allocated with swift_allocBox.
static void destroyGenericBox(HeapObject *o) {
auto metadata = static_cast<const GenericBoxHeapMetadata *>(o->metadata);
// Destroy the object inside.
auto *value = metadata->project(o);
metadata->BoxedType->vw_destroy(value);
// Deallocate the box.
SWIFT_RT_ENTRY_CALL(swift_deallocObject) (o, metadata->getAllocSize(),
metadata->getAllocAlignMask());
}
class BoxCacheEntry : public CacheEntry<BoxCacheEntry> {
public:
FullMetadata<GenericBoxHeapMetadata> Metadata;
BoxCacheEntry(size_t numArguments)
: Metadata{HeapMetadataHeader{{destroyGenericBox}, {nullptr}},
GenericBoxHeapMetadata{MetadataKind::HeapGenericLocalVariable, 0,
nullptr}} {
assert(numArguments == 1);
}
size_t getNumArguments() const {
return 1;
}
static const char *getName() {
return "BoxCache";
}
FullMetadata<GenericBoxHeapMetadata> *getData() {
return &Metadata;
}
const FullMetadata<GenericBoxHeapMetadata> *getData() const {
return &Metadata;
}
};
} // end anonymous namespace
static Lazy<MetadataCache<BoxCacheEntry>> Boxes;
SWIFT_RUNTIME_EXPORT
BoxPair::Return
swift::swift_allocBox(const Metadata *type) {
return SWIFT_RT_ENTRY_REF(swift_allocBox)(type);
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
BoxPair::Return SWIFT_RT_ENTRY_IMPL(swift_allocBox)(const Metadata *type) {
// Get the heap metadata for the box.
auto &B = Boxes.get();
const void *typeArg = type;
auto entry = B.findOrAdd(&typeArg, 1, [&]() -> BoxCacheEntry* {
// Create a new entry for the box.
auto entry = BoxCacheEntry::allocate(B.getAllocator(), &typeArg, 1, 0);
auto metadata = entry->getData();
metadata->Offset = GenericBoxHeapMetadata::getHeaderOffset(type);
metadata->BoxedType = type;
return entry;
});
auto metadata = entry->getData();
// Allocate and project the box.
auto allocation = SWIFT_RT_ENTRY_CALL(swift_allocObject)(
metadata, metadata->getAllocSize(), metadata->getAllocAlignMask());
auto projection = metadata->project(allocation);
return BoxPair{allocation, projection};
}
void swift::swift_deallocBox(HeapObject *o) {
auto metadata = static_cast<const GenericBoxHeapMetadata *>(o->metadata);
SWIFT_RT_ENTRY_CALL(swift_deallocObject)(o, metadata->getAllocSize(),
metadata->getAllocAlignMask());
}
OpaqueValue *swift::swift_projectBox(HeapObject *o) {
// The compiler will use a nil reference as a way to avoid allocating memory
// for boxes of empty type. The address of an empty value is always undefined,
// so we can just return nil back in this case.
if (!o)
return reinterpret_cast<OpaqueValue*>(o);
auto metadata = static_cast<const GenericBoxHeapMetadata *>(o->metadata);
return metadata->project(o);
}
// Forward-declare this, but define it after swift_release.
extern "C" LLVM_LIBRARY_VISIBILITY
void _swift_release_dealloc(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL)
__attribute__((noinline,used));
SWIFT_RT_ENTRY_VISIBILITY
extern "C"
void swift::swift_retain(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
SWIFT_RT_ENTRY_REF(swift_retain)(object);
}
SWIFT_RT_ENTRY_VISIBILITY
extern "C"
void swift::swift_nonatomic_retain(HeapObject *object) {
SWIFT_RT_ENTRY_REF(swift_nonatomic_retain)(object);
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
void SWIFT_RT_ENTRY_IMPL(swift_nonatomic_retain)(HeapObject *object) {
_swift_nonatomic_retain_inlined(object);
}
SWIFT_RT_ENTRY_VISIBILITY
extern "C"
void swift::swift_nonatomic_release(HeapObject *object) {
return SWIFT_RT_ENTRY_REF(swift_nonatomic_release)(object);
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
void SWIFT_RT_ENTRY_IMPL(swift_nonatomic_release)(HeapObject *object) {
if (object && object->refCount.decrementShouldDeallocateNonAtomic()) {
// TODO: Use non-atomic _swift_release_dealloc?
_swift_release_dealloc(object);
}
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
void SWIFT_RT_ENTRY_IMPL(swift_retain)(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
_swift_retain_inlined(object);
}
SWIFT_RT_ENTRY_VISIBILITY
extern "C"
void swift::swift_retain_n(HeapObject *object, uint32_t n)
SWIFT_CC(RegisterPreservingCC_IMPL) {
SWIFT_RT_ENTRY_REF(swift_retain_n)(object, n);
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
void SWIFT_RT_ENTRY_IMPL(swift_retain_n)(HeapObject *object, uint32_t n)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (object) {
object->refCount.increment(n);
}
}
SWIFT_RT_ENTRY_VISIBILITY
extern "C"
void swift::swift_nonatomic_retain_n(HeapObject *object, uint32_t n)
SWIFT_CC(RegisterPreservingCC_IMPL) {
SWIFT_RT_ENTRY_REF(swift_nonatomic_retain_n)(object, n);
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
void SWIFT_RT_ENTRY_IMPL(swift_nonatomic_retain_n)(HeapObject *object, uint32_t n)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (object) {
object->refCount.incrementNonAtomic(n);
}
}
SWIFT_RT_ENTRY_VISIBILITY
extern "C"
void swift::swift_release(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
SWIFT_RT_ENTRY_REF(swift_release)(object);
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
void SWIFT_RT_ENTRY_IMPL(swift_release)(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (object && object->refCount.decrementShouldDeallocate()) {
_swift_release_dealloc(object);
}
}
SWIFT_RT_ENTRY_VISIBILITY
void swift::swift_release_n(HeapObject *object, uint32_t n)
SWIFT_CC(RegisterPreservingCC_IMPL) {
return SWIFT_RT_ENTRY_REF(swift_release_n)(object, n);
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
void SWIFT_RT_ENTRY_IMPL(swift_release_n)(HeapObject *object, uint32_t n)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (object && object->refCount.decrementShouldDeallocateN(n)) {
_swift_release_dealloc(object);
}
}
void swift::swift_setDeallocating(HeapObject *object) {
object->refCount.decrementFromOneAndDeallocateNonAtomic();
}
SWIFT_RT_ENTRY_VISIBILITY
void swift::swift_nonatomic_release_n(HeapObject *object, uint32_t n)
SWIFT_CC(RegisterPreservingCC_IMPL) {
return SWIFT_RT_ENTRY_REF(swift_nonatomic_release_n)(object, n);
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
void SWIFT_RT_ENTRY_IMPL(swift_nonatomic_release_n)(HeapObject *object, uint32_t n)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (object && object->refCount.decrementShouldDeallocateNNonAtomic(n)) {
_swift_release_dealloc(object);
}
}
size_t swift::swift_retainCount(HeapObject *object) {
return object->refCount.getCount();
}
size_t swift::swift_unownedRetainCount(HeapObject *object) {
return object->weakRefCount.getCount();
}
SWIFT_RT_ENTRY_VISIBILITY
void swift::swift_unownedRetain(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (!object)
return;
object->weakRefCount.increment();
}
SWIFT_RT_ENTRY_VISIBILITY
void swift::swift_unownedRelease(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (!object)
return;
if (object->weakRefCount.decrementShouldDeallocate()) {
// Only class objects can be weak-retained and weak-released.
auto metadata = object->metadata;
assert(metadata->isClassObject());
auto classMetadata = static_cast<const ClassMetadata*>(metadata);
assert(classMetadata->isTypeMetadata());
SWIFT_RT_ENTRY_CALL(swift_slowDealloc)
(object, classMetadata->getInstanceSize(),
classMetadata->getInstanceAlignMask());
}
}
SWIFT_RT_ENTRY_VISIBILITY
extern "C"
void swift::swift_unownedRetain_n(HeapObject *object, int n)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (!object)
return;
object->weakRefCount.increment(n);
}
SWIFT_RT_ENTRY_VISIBILITY
extern "C"
void swift::swift_unownedRelease_n(HeapObject *object, int n)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (!object)
return;
if (object->weakRefCount.decrementShouldDeallocateN(n)) {
// Only class objects can be weak-retained and weak-released.
auto metadata = object->metadata;
assert(metadata->isClassObject());
auto classMetadata = static_cast<const ClassMetadata*>(metadata);
assert(classMetadata->isTypeMetadata());
SWIFT_RT_ENTRY_CALL(swift_slowDealloc)
(object, classMetadata->getInstanceSize(),
classMetadata->getInstanceAlignMask());
}
}
SWIFT_RT_ENTRY_VISIBILITY
HeapObject *swift::swift_tryPin(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
assert(object);
// Try to set the flag. If this succeeds, the caller will be
// responsible for clearing it.
if (object->refCount.tryIncrementAndPin()) {
return object;
}
// If setting the flag failed, it's because it was already set.
// Return nil so that the object will be deallocated later.
return nullptr;
}
SWIFT_RT_ENTRY_VISIBILITY
void swift::swift_unpin(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (object && object->refCount.decrementAndUnpinShouldDeallocate()) {
_swift_release_dealloc(object);
}
}
SWIFT_RT_ENTRY_VISIBILITY
HeapObject *swift::swift_tryRetain(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
return SWIFT_RT_ENTRY_REF(swift_tryRetain)(object);
}
SWIFT_RT_ENTRY_VISIBILITY
HeapObject *swift::swift_nonatomic_tryPin(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
assert(object);
// Try to set the flag. If this succeeds, the caller will be
// responsible for clearing it.
if (object->refCount.tryIncrementAndPinNonAtomic()) {
return object;
}
// If setting the flag failed, it's because it was already set.
// Return nil so that the object will be deallocated later.
return nullptr;
}
SWIFT_RT_ENTRY_VISIBILITY
void swift::swift_nonatomic_unpin(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (object && object->refCount.decrementAndUnpinShouldDeallocateNonAtomic()) {
_swift_release_dealloc(object);
}
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
HeapObject *SWIFT_RT_ENTRY_IMPL(swift_tryRetain)(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (!object)
return nullptr;
if (object->refCount.tryIncrement()) return object;
else return nullptr;
}
SWIFT_RUNTIME_EXPORT
extern "C"
bool swift_isDeallocating(HeapObject *object) {
return SWIFT_RT_ENTRY_REF(swift_isDeallocating)(object);
}
SWIFT_RT_ENTRY_IMPL_VISIBILITY
extern "C"
bool SWIFT_RT_ENTRY_IMPL(swift_isDeallocating)(HeapObject *object) {
if (!object) return false;
return object->refCount.isDeallocating();
}
SWIFT_RT_ENTRY_VISIBILITY
void swift::swift_unownedRetainStrong(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (!object)
return;
assert(object->weakRefCount.getCount() &&
"object is not currently weakly retained");
if (! object->refCount.tryIncrement())
_swift_abortRetainUnowned(object);
}
SWIFT_RT_ENTRY_VISIBILITY
void
swift::swift_unownedRetainStrongAndRelease(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
if (!object)
return;
assert(object->weakRefCount.getCount() &&
"object is not currently weakly retained");
if (! object->refCount.tryIncrement())
_swift_abortRetainUnowned(object);
// This should never cause a deallocation.
bool dealloc = object->weakRefCount.decrementShouldDeallocate();
assert(!dealloc && "retain-strong-and-release caused dealloc?");
(void) dealloc;
}
void swift::swift_unownedCheck(HeapObject *object) {
if (!object) return;
assert(object->weakRefCount.getCount() &&
"object is not currently weakly retained");
if (object->refCount.isDeallocating())
_swift_abortRetainUnowned(object);
}
// Declared extern "C" LLVM_LIBRARY_VISIBILITY above.
void _swift_release_dealloc(HeapObject *object)
SWIFT_CC(RegisterPreservingCC_IMPL) {
asFullMetadata(object->metadata)->destroy(object);
}
#if SWIFT_OBJC_INTEROP
/// Perform the root -dealloc operation for a class instance.
void swift::swift_rootObjCDealloc(HeapObject *self) {
auto metadata = self->metadata;
assert(metadata->isClassObject());
auto classMetadata = static_cast<const ClassMetadata*>(metadata);
assert(classMetadata->isTypeMetadata());
swift_deallocClassInstance(self, classMetadata->getInstanceSize(),
classMetadata->getInstanceAlignMask());
}
#endif
void swift::swift_deallocClassInstance(HeapObject *object,
size_t allocatedSize,
size_t allocatedAlignMask) {
#if SWIFT_OBJC_INTEROP
// We need to let the ObjC runtime clean up any associated objects or weak
// references associated with this object.
objc_destructInstance((id)object);
#endif
SWIFT_RT_ENTRY_CALL(swift_deallocObject)
(object, allocatedSize,
allocatedAlignMask);
}
/// Variant of the above used in constructor failure paths.
SWIFT_RUNTIME_EXPORT
extern "C" void swift_deallocPartialClassInstance(HeapObject *object,
HeapMetadata const *metadata,
size_t allocatedSize,
size_t allocatedAlignMask) {
if (!object)
return;
// Destroy ivars
auto *classMetadata = _swift_getClassOfAllocated(object)->getClassObject();
assert(classMetadata && "Not a class?");
while (classMetadata != metadata) {
#if SWIFT_OBJC_INTEROP
// If we have hit a pure Objective-C class, we won't see another ivar
// destroyer.
if (classMetadata->isPureObjC()) {
// Set the class to the pure Objective-C superclass, so that when dealloc
// runs, it starts at that superclass.
object_setClass((id)object, (Class)classMetadata);
// Release the object.
objc_release((id)object);
return;
}
#endif
if (auto fn = classMetadata->getIVarDestroyer())
fn(object);
classMetadata = classMetadata->SuperClass->getClassObject();
assert(classMetadata && "Given metatype not a superclass of object type?");
}
#if SWIFT_OBJC_INTEROP
// If this class doesn't use Swift-native reference counting, use
// objc_release instead.
if (!usesNativeSwiftReferenceCounting(classMetadata)) {
// Find the pure Objective-C superclass.
while (!classMetadata->isPureObjC())
classMetadata = classMetadata->SuperClass->getClassObject();
// Set the class to the pure Objective-C superclass, so that when dealloc
// runs, it starts at that superclass.
object_setClass((id)object, (Class)classMetadata);
// Release the object.
objc_release((id)object);
return;
}
#endif
// The strong reference count should be +1 -- tear down the object
bool shouldDeallocate = object->refCount.decrementShouldDeallocate();
assert(shouldDeallocate);
(void) shouldDeallocate;
swift_deallocClassInstance(object, allocatedSize, allocatedAlignMask);
}
#if !defined(__APPLE__) && defined(SWIFT_RUNTIME_CLOBBER_FREED_OBJECTS)
static inline void memset_pattern8(void *b, const void *pattern8, size_t len) {
char *ptr = static_cast<char *>(b);
while (len >= 8) {
memcpy(ptr, pattern8, 8);
ptr += 8;
len -= 8;
}
memcpy(ptr, pattern8, len);
}
#endif
SWIFT_RT_ENTRY_VISIBILITY
void swift::swift_deallocObject(HeapObject *object,
size_t allocatedSize,
size_t allocatedAlignMask)
SWIFT_CC(RegisterPreservingCC_IMPL) {
assert(isAlignmentMask(allocatedAlignMask));
assert(object->refCount.isDeallocating());
#ifdef SWIFT_RUNTIME_CLOBBER_FREED_OBJECTS
memset_pattern8((uint8_t *)object + sizeof(HeapObject),
"\xAB\xAD\x1D\xEA\xF4\xEE\xD0\bB9",
allocatedSize - sizeof(HeapObject));
#endif
// If we are tracking leaks, stop tracking this object.
SWIFT_LEAKS_STOP_TRACKING_OBJECT(object);
// Drop the initial weak retain of the object.
//
// If the outstanding weak retain count is 1 (i.e. only the initial
// weak retain), we can immediately call swift_slowDealloc. This is
// useful both as a way to eliminate an unnecessary atomic
// operation, and as a way to avoid calling swift_unownedRelease on an
// object that might be a class object, which simplifies the logic
// required in swift_unownedRelease for determining the size of the
// object.
//
// If we see that there is an outstanding weak retain of the object,
// we need to fall back on swift_release, because it's possible for
// us to race against a weak retain or a weak release. But if the
// outstanding weak retain count is 1, then anyone attempting to
// increase the weak reference count is inherently racing against
// deallocation and thus in undefined-behavior territory. And
// we can even do this with a normal load! Here's why:
//
// 1. There is an invariant that, if the strong reference count
// is > 0, then the weak reference count is > 1.
//
// 2. The above lets us say simply that, in the absence of
// races, once a reference count reaches 0, there are no points
// which happen-after where the reference count is > 0.
//
// 3. To not race, a strong retain must happen-before a point
// where the strong reference count is > 0, and a weak retain
// must happen-before a point where the weak reference count
// is > 0.
//
// 4. Changes to either the strong and weak reference counts occur
// in a total order with respect to each other. This can
// potentially be done with a weaker memory ordering than
// sequentially consistent if the architecture provides stronger
// ordering for memory guaranteed to be co-allocated on a cache
// line (which the reference count fields are).
//
// 5. This function happens-after a point where the strong
// reference count was 0.
//
// 6. Therefore, if a normal load in this function sees a weak
// reference count of 1, it cannot be racing with a weak retain
// that is not racing with deallocation:
//
// - A weak retain must happen-before a point where the weak
// reference count is > 0.
//
// - This function logically decrements the weak reference
// count. If it is possible for it to see a weak reference
// count of 1, then at the end of this function, the
// weak reference count will logically be 0.
//
// - There can be no points after that point where the
// weak reference count will be > 0.
//
// - Therefore either the weak retain must happen-before this
// function, or this function cannot see a weak reference
// count of 1, or there is a race.
//
// Note that it is okay for there to be a race involving a weak
// *release* which happens after the strong reference count drops to
// 0. However, this is harmless: if our load fails to see the
// release, we will fall back on swift_unownedRelease, which does an
// atomic decrement (and has the ability to reconstruct
// allocatedSize and allocatedAlignMask).
if (object->weakRefCount.getCount() == 1) {
SWIFT_RT_ENTRY_CALL(swift_slowDealloc)
(object, allocatedSize,
allocatedAlignMask);
} else {
SWIFT_RT_ENTRY_CALL(swift_unownedRelease)(object);
}
}
void swift::swift_weakInit(WeakReference *ref, HeapObject *value) {
ref->Value = value;
SWIFT_RT_ENTRY_CALL(swift_unownedRetain)(value);
}
void swift::swift_weakAssign(WeakReference *ref, HeapObject *newValue) {
SWIFT_RT_ENTRY_CALL(swift_unownedRetain)(newValue);
auto oldValue = ref->Value;
ref->Value = newValue;
SWIFT_RT_ENTRY_CALL(swift_unownedRelease)(oldValue);
}
HeapObject *swift::swift_weakLoadStrong(WeakReference *ref) {
auto object = ref->Value;
if (object == nullptr) return nullptr;
if (object->refCount.isDeallocating()) {
SWIFT_RT_ENTRY_CALL(swift_unownedRelease)(object);
ref->Value = nullptr;
return nullptr;
}
return swift_tryRetain(object);
}
HeapObject *swift::swift_weakTakeStrong(WeakReference *ref) {
auto result = swift_weakLoadStrong(ref);
swift_weakDestroy(ref);
return result;
}
void swift::swift_weakDestroy(WeakReference *ref) {
auto tmp = ref->Value;
ref->Value = nullptr;
SWIFT_RT_ENTRY_CALL(swift_unownedRelease)(tmp);
}
void swift::swift_weakCopyInit(WeakReference *dest, WeakReference *src) {
auto object = src->Value;
if (object == nullptr) {
dest->Value = nullptr;
} else if (object->refCount.isDeallocating()) {
src->Value = nullptr;
dest->Value = nullptr;
SWIFT_RT_ENTRY_CALL(swift_unownedRelease)(object);
} else {
dest->Value = object;
SWIFT_RT_ENTRY_CALL(swift_unownedRetain)(object);
}
}
void swift::swift_weakTakeInit(WeakReference *dest, WeakReference *src) {
auto object = src->Value;
dest->Value = object;
if (object != nullptr && object->refCount.isDeallocating()) {
dest->Value = nullptr;
SWIFT_RT_ENTRY_CALL(swift_unownedRelease)(object);
}
}
void swift::swift_weakCopyAssign(WeakReference *dest, WeakReference *src) {
if (auto object = dest->Value) {
SWIFT_RT_ENTRY_CALL(swift_unownedRelease)(object);
}
swift_weakCopyInit(dest, src);
}
void swift::swift_weakTakeAssign(WeakReference *dest, WeakReference *src) {
if (auto object = dest->Value) {
SWIFT_RT_ENTRY_CALL(swift_unownedRelease)(object);
}
swift_weakTakeInit(dest, src);
}
void swift::_swift_abortRetainUnowned(const void *object) {
(void)object;
swift::crash("attempted to retain deallocated object");
}
| SwiftAndroid/swift | stdlib/public/runtime/HeapObject.cpp | C++ | apache-2.0 | 28,000 | 31.672112 | 83 | 0.696429 | false |
#!/usr/bin/env ts-node
/**
* Wechaty Chatbot SDK - https://github.com/wechaty/wechaty
*
* @copyright 2016 Huan LI (李卓桓) <https://github.com/huan>, and
* Wechaty Contributors <https://github.com/wechaty>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import { PuppetManager } from '../src/puppet-manager'
PuppetManager.installAll()
.catch(e => {
console.error(e)
process.exit(1)
})
| binsee/wechaty | bin/puppet-install.ts | TypeScript | apache-2.0 | 964 | 33.214286 | 77 | 0.680585 | false |
/*
* Copyright 2007-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using JetBrains.DocumentModel;
using JetBrains.ReSharper.Feature.Services.Daemon;
namespace ReSharperExtension.Highlighting
{
/// <summary>
/// The highlighting used to report an error at a given document range with the supplied tooltip
/// </summary>
[StaticSeverityHighlighting(Severity.ERROR, "CSharpInfo")]
public class ErrorWarning : IHighlighting
{
private readonly string myTooltip;
private DocumentRange range;
public ErrorWarning(DocumentRange range, string toolTip)
{
myTooltip = toolTip;
this.range = range;
}
#region IHighlighting members
public string ToolTip
{
get { return myTooltip; }
}
public string ErrorStripeToolTip
{
get { return myTooltip; }
}
public int NavigationOffsetPatch
{
get { return 0; }
}
public bool IsValid()
{
return true;
}
public DocumentRange CalculateRange()
{
return range;
}
#endregion
}
}
| fedorovr/YaccConstructor | src/ReSharperExtension/Highlighting/ErrorWarning.cs | C# | apache-2.0 | 1,692 | 25.238095 | 75 | 0.619385 | false |
/*
* Copyright (c) 2006-2018, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2018-04-02 tanek first implementation
* 2019-04-27 misonyo update to cortex-m7 series
*/
#include <rthw.h>
#include <rtdef.h>
#include <board.h>
/* The L1-caches on all Cortex®-M7s are divided into lines of 32 bytes. */
#define L1CACHE_LINESIZE_BYTE (32)
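/* Illustrative example (addresses assumed): a cache operation on addr = 0x20000004
 * with size = 64 rounds the start down to 0x20000000 and extends the byte count to
 * 64 + 4 = 68, so three 32-byte lines are touched: 0x20000000, 0x20000020, 0x20000040. */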
void rt_hw_cpu_icache_enable(void)
{
SCB_EnableICache();
}
void rt_hw_cpu_icache_disable(void)
{
SCB_DisableICache();
}
rt_base_t rt_hw_cpu_icache_status(void)
{
return 0;
}
void rt_hw_cpu_icache_ops(int ops, void* addr, int size)
{
rt_uint32_t address = (rt_uint32_t)addr & (rt_uint32_t) ~(L1CACHE_LINESIZE_BYTE - 1);
rt_int32_t size_byte = size + (rt_uint32_t)addr - address;
rt_uint32_t linesize = 32U;
if (ops & RT_HW_CACHE_INVALIDATE)
{
__DSB();
while (size_byte > 0)
{
SCB->ICIMVAU = address;
address += linesize;
size_byte -= linesize;
}
__DSB();
__ISB();
}
}
void rt_hw_cpu_dcache_enable(void)
{
SCB_EnableDCache();
}
void rt_hw_cpu_dcache_disable(void)
{
SCB_DisableDCache();
}
rt_base_t rt_hw_cpu_dcache_status(void)
{
return 0;
}
void rt_hw_cpu_dcache_ops(int ops, void* addr, int size)
{
rt_uint32_t startAddr = (rt_uint32_t)addr & (rt_uint32_t)~(L1CACHE_LINESIZE_BYTE - 1);
rt_uint32_t size_byte = size + (rt_uint32_t)addr - startAddr;
if ((ops & (RT_HW_CACHE_FLUSH | RT_HW_CACHE_INVALIDATE)) == (RT_HW_CACHE_FLUSH | RT_HW_CACHE_INVALIDATE))
{
SCB_CleanInvalidateDCache_by_Addr((rt_uint32_t *)startAddr, size_byte);
}
else if (ops & RT_HW_CACHE_FLUSH)
{
SCB_CleanDCache_by_Addr((rt_uint32_t *)startAddr, size_byte);
}
else if (ops & RT_HW_CACHE_INVALIDATE)
{
SCB_InvalidateDCache_by_Addr((rt_uint32_t *)startAddr, size_byte);
}
else
{
RT_ASSERT(0);
}
}
| FlyLu/rt-thread | libcpu/arm/cortex-m7/cpu_cache.c | C | apache-2.0 | 1,990 | 21.348315 | 90 | 0.593766 | false |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>basic_socket_iostream::error</title>
<link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.78.1">
<link rel="home" href="../../../boost_asio.html" title="Boost.Asio">
<link rel="up" href="../basic_socket_iostream.html" title="basic_socket_iostream">
<link rel="prev" href="endpoint_type.html" title="basic_socket_iostream::endpoint_type">
<link rel="next" href="expires_at.html" title="basic_socket_iostream::expires_at">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td>
<td align="center"><a href="../../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="endpoint_type.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../basic_socket_iostream.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="expires_at.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h4 class="title">
<a name="boost_asio.reference.basic_socket_iostream.error"></a><a class="link" href="error.html" title="basic_socket_iostream::error">basic_socket_iostream::error</a>
</h4></div></div></div>
<p>
<a class="indexterm" name="idp70961224"></a>
Get the last error associated with the stream.
</p>
<pre class="programlisting"><span class="keyword">const</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">system</span><span class="special">::</span><span class="identifier">error_code</span> <span class="special">&</span> <span class="identifier">error</span><span class="special">()</span> <span class="keyword">const</span><span class="special">;</span>
</pre>
<h6>
<a name="boost_asio.reference.basic_socket_iostream.error.h0"></a>
<span class="phrase"><a name="boost_asio.reference.basic_socket_iostream.error.return_value"></a></span><a class="link" href="error.html#boost_asio.reference.basic_socket_iostream.error.return_value">Return
Value</a>
</h6>
<p>
An <code class="computeroutput"><span class="identifier">error_code</span></code> corresponding
to the last error from the stream.
</p>
<h6>
<a name="boost_asio.reference.basic_socket_iostream.error.h1"></a>
<span class="phrase"><a name="boost_asio.reference.basic_socket_iostream.error.example"></a></span><a class="link" href="error.html#boost_asio.reference.basic_socket_iostream.error.example">Example</a>
</h6>
<p>
To print the error associated with a failure to establish a connection:
</p>
<pre class="programlisting"><span class="identifier">tcp</span><span class="special">::</span><span class="identifier">iostream</span> <span class="identifier">s</span><span class="special">(</span><span class="string">"www.boost.org"</span><span class="special">,</span> <span class="string">"http"</span><span class="special">);</span>
<span class="keyword">if</span> <span class="special">(!</span><span class="identifier">s</span><span class="special">)</span>
<span class="special">{</span>
<span class="identifier">std</span><span class="special">::</span><span class="identifier">cout</span> <span class="special"><<</span> <span class="string">"Error: "</span> <span class="special"><<</span> <span class="identifier">s</span><span class="special">.</span><span class="identifier">error</span><span class="special">().</span><span class="identifier">message</span><span class="special">()</span> <span class="special"><<</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">endl</span><span class="special">;</span>
<span class="special">}</span>
</pre>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2003-2013 Christopher M. Kohlhoff<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="endpoint_type.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../basic_socket_iostream.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="expires_at.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| NixaSoftware/CVis | venv/bin/doc/html/boost_asio/reference/basic_socket_iostream/error.html | HTML | apache-2.0 | 5,528 | 76.859155 | 592 | 0.651773 | false |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>Struct template impl</title>
<link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.78.1">
<link rel="home" href="../../../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset">
<link rel="up" href="../nary_expr.html#idp184785432" title="Description">
<link rel="prev" href="../nary_expr.html" title="Struct template nary_expr">
<link rel="next" href="../is_expr.html" title="Struct template is_expr">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td>
<td align="center"><a href="../../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="../nary_expr.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../nary_expr.html#idp184785432"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="../is_expr.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="refentry">
<a name="boost.proto.nary_expr.impl"></a><div class="titlepage"></div>
<div class="refnamediv">
<h2><span class="refentrytitle">Struct template impl</span></h2>
<p>boost::proto::nary_expr::impl</p>
</div>
<h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2>
<div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: <<a class="link" href="../../../proto/reference.html#header.boost.proto.traits_hpp" title="Header <boost/proto/traits.hpp>">boost/proto/traits.hpp</a>>
</span>
<span class="keyword">template</span><span class="special"><</span><span class="keyword">typename</span> <a class="link" href="../../../Expr.html" title="Concept Expr">Expr</a><span class="special">,</span> <span class="keyword">typename</span> State<span class="special">,</span> <span class="keyword">typename</span> Data<span class="special">></span>
<span class="keyword">struct</span> <a class="link" href="impl.html" title="Struct template impl">impl</a> <span class="special">:</span>
<span class="keyword"></span> <a class="link" href="../pass_through.html" title="Struct template pass_through">proto::pass_through</a><nary_expr>::template impl<Expr, State, Data>
<span class="special">{</span>
<span class="special">}</span><span class="special">;</span></pre></div>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2008 Eric Niebler<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="../nary_expr.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../nary_expr.html#idp184785432"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="../is_expr.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| NixaSoftware/CVis | venv/bin/doc/html/boost/proto/nary_expr/impl.html | HTML | apache-2.0 | 4,148 | 75.814815 | 437 | 0.644648 | false |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="de">
<head>
<title>ImportColumnCSV (ARX Developer Documentation)</title>
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="ImportColumnCSV (ARX Developer Documentation)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/ImportColumnCSV.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/deidentifier/arx/io/ImportColumn.html" title="class in org.deidentifier.arx.io"><span class="strong">Prev Class</span></a></li>
<li><a href="../../../../org/deidentifier/arx/io/ImportColumnExcel.html" title="class in org.deidentifier.arx.io"><span class="strong">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/deidentifier/arx/io/ImportColumnCSV.html" target="_top">Frames</a></li>
<li><a href="ImportColumnCSV.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor_summary">Constr</a> | </li>
<li><a href="#methods_inherited_from_class_org.deidentifier.arx.io.ImportColumnIndexed">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor_detail">Constr</a> | </li>
<li>Method</li>
</ul>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.deidentifier.arx.io</div>
<h2 title="Class ImportColumnCSV" class="title">Class ImportColumnCSV</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li><a href="../../../../org/deidentifier/arx/io/ImportColumn.html" title="class in org.deidentifier.arx.io">org.deidentifier.arx.io.ImportColumn</a></li>
<li>
<ul class="inheritance">
<li><a href="../../../../org/deidentifier/arx/io/ImportColumnIndexed.html" title="class in org.deidentifier.arx.io">org.deidentifier.arx.io.ImportColumnIndexed</a></li>
<li>
<ul class="inheritance">
<li>org.deidentifier.arx.io.ImportColumnCSV</li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Implemented Interfaces:</dt>
<dd><a href="../../../../org/deidentifier/arx/io/IImportColumnIndexed.html" title="interface in org.deidentifier.arx.io">IImportColumnIndexed</a></dd>
</dl>
<hr>
<br>
<pre>public class <span class="strong">ImportColumnCSV</span>
extends <a href="../../../../org/deidentifier/arx/io/ImportColumnIndexed.html" title="class in org.deidentifier.arx.io">ImportColumnIndexed</a></pre>
<div class="block">Represents a single CSV data column
CSV columns are referred to by an index (see <a href="../../../../org/deidentifier/arx/io/ImportColumnIndexed.html" title="class in org.deidentifier.arx.io"><code>ImportColumnIndexed</code></a>).</div>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor_summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><strong><a href="../../../../org/deidentifier/arx/io/ImportColumnCSV.html#ImportColumnCSV(int, org.deidentifier.arx.DataType)">ImportColumnCSV</a></strong>(int index,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype)</code>
<div class="block">Creates a new instance of this object with the given parameters.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><code><strong><a href="../../../../org/deidentifier/arx/io/ImportColumnCSV.html#ImportColumnCSV(int, org.deidentifier.arx.DataType, boolean)">ImportColumnCSV</a></strong>(int index,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype,
boolean cleansing)</code>
<div class="block">Creates a new instance of this object with the given parameters.</div>
</td>
</tr>
<tr class="altColor">
<td class="colOne"><code><strong><a href="../../../../org/deidentifier/arx/io/ImportColumnCSV.html#ImportColumnCSV(int, java.lang.String, org.deidentifier.arx.DataType)">ImportColumnCSV</a></strong>(int index,
java.lang.String aliasName,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype)</code>
<div class="block">Creates a new instance of this object with the given parameters.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><code><strong><a href="../../../../org/deidentifier/arx/io/ImportColumnCSV.html#ImportColumnCSV(int, java.lang.String, org.deidentifier.arx.DataType, boolean)">ImportColumnCSV</a></strong>(int index,
java.lang.String aliasName,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype,
boolean cleansing)</code>
<div class="block">Creates a new instance of this object with the given parameters.</div>
</td>
</tr>
<tr class="altColor">
<td class="colOne"><code><strong><a href="../../../../org/deidentifier/arx/io/ImportColumnCSV.html#ImportColumnCSV(java.lang.String, org.deidentifier.arx.DataType)">ImportColumnCSV</a></strong>(java.lang.String name,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype)</code>
<div class="block">Creates a new instance of this object with the given parameters.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><code><strong><a href="../../../../org/deidentifier/arx/io/ImportColumnCSV.html#ImportColumnCSV(java.lang.String, org.deidentifier.arx.DataType, boolean)">ImportColumnCSV</a></strong>(java.lang.String name,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype,
boolean cleansing)</code>
<div class="block">Creates a new instance of this object with the given parameters.</div>
</td>
</tr>
<tr class="altColor">
<td class="colOne"><code><strong><a href="../../../../org/deidentifier/arx/io/ImportColumnCSV.html#ImportColumnCSV(java.lang.String, java.lang.String, org.deidentifier.arx.DataType)">ImportColumnCSV</a></strong>(java.lang.String name,
java.lang.String alias,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype)</code>
<div class="block">Creates a new instance of this object with the given parameters.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><code><strong><a href="../../../../org/deidentifier/arx/io/ImportColumnCSV.html#ImportColumnCSV(java.lang.String, java.lang.String, org.deidentifier.arx.DataType, boolean)">ImportColumnCSV</a></strong>(java.lang.String name,
java.lang.String alias,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype,
boolean cleansing)</code>
<div class="block">Creates a new instance of this object with the given parameters.</div>
</td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method_summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<ul class="blockList">
<li class="blockList"><a name="methods_inherited_from_class_org.deidentifier.arx.io.ImportColumnIndexed">
<!-- -->
</a>
<h3>Methods inherited from class org.deidentifier.arx.io.<a href="../../../../org/deidentifier/arx/io/ImportColumnIndexed.html" title="class in org.deidentifier.arx.io">ImportColumnIndexed</a></h3>
<code><a href="../../../../org/deidentifier/arx/io/ImportColumnIndexed.html#getIndex()">getIndex</a>, <a href="../../../../org/deidentifier/arx/io/ImportColumnIndexed.html#getName()">getName</a>, <a href="../../../../org/deidentifier/arx/io/ImportColumnIndexed.html#isIndexSpecified()">isIndexSpecified</a>, <a href="../../../../org/deidentifier/arx/io/ImportColumnIndexed.html#setIndex(int)">setIndex</a>, <a href="../../../../org/deidentifier/arx/io/ImportColumnIndexed.html#setName(java.lang.String)">setName</a></code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods_inherited_from_class_org.deidentifier.arx.io.ImportColumn">
<!-- -->
</a>
<h3>Methods inherited from class org.deidentifier.arx.io.<a href="../../../../org/deidentifier/arx/io/ImportColumn.html" title="class in org.deidentifier.arx.io">ImportColumn</a></h3>
<code><a href="../../../../org/deidentifier/arx/io/ImportColumn.html#getAliasName()">getAliasName</a>, <a href="../../../../org/deidentifier/arx/io/ImportColumn.html#getDataType()">getDataType</a>, <a href="../../../../org/deidentifier/arx/io/ImportColumn.html#isCleansing()">isCleansing</a>, <a href="../../../../org/deidentifier/arx/io/ImportColumn.html#setAliasName(java.lang.String)">setAliasName</a>, <a href="../../../../org/deidentifier/arx/io/ImportColumn.html#setCleansing(boolean)">setCleansing</a>, <a href="../../../../org/deidentifier/arx/io/ImportColumn.html#setDataType(org.deidentifier.arx.DataType)">setDataType</a></code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods_inherited_from_class_java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor_detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="ImportColumnCSV(int, org.deidentifier.arx.DataType)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>ImportColumnCSV</h4>
<pre>public ImportColumnCSV(int index,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype)</pre>
<div class="block">Creates a new instance of this object with the given parameters.</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>index</code> - the index</dd><dd><code>datatype</code> - the datatype</dd><dt><span class="strong">See Also:</span></dt><dd><code>ImportColumnIndexed}</code></dd></dl>
</li>
</ul>
<a name="ImportColumnCSV(int, org.deidentifier.arx.DataType, boolean)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>ImportColumnCSV</h4>
<pre>public ImportColumnCSV(int index,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype,
boolean cleansing)</pre>
<div class="block">Creates a new instance of this object with the given parameters.</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>index</code> - the index</dd><dd><code>datatype</code> - the datatype</dd><dd><code>cleansing</code> - the cleansing</dd></dl>
</li>
</ul>
<a name="ImportColumnCSV(int, java.lang.String, org.deidentifier.arx.DataType)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>ImportColumnCSV</h4>
<pre>public ImportColumnCSV(int index,
java.lang.String aliasName,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype)</pre>
<div class="block">Creates a new instance of this object with the given parameters.</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>index</code> - the index</dd><dd><code>aliasName</code> - the alias name</dd><dd><code>datatype</code> - the datatype</dd><dt><span class="strong">See Also:</span></dt><dd><code>ImportColumnIndexed}</code></dd></dl>
</li>
</ul>
<a name="ImportColumnCSV(int, java.lang.String, org.deidentifier.arx.DataType, boolean)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>ImportColumnCSV</h4>
<pre>public ImportColumnCSV(int index,
java.lang.String aliasName,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype,
boolean cleansing)</pre>
<div class="block">Creates a new instance of this object with the given parameters.</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>index</code> - the index</dd><dd><code>aliasName</code> - the alias name</dd><dd><code>datatype</code> - the datatype</dd><dd><code>cleansing</code> - the cleansing</dd></dl>
</li>
</ul>
<a name="ImportColumnCSV(java.lang.String, org.deidentifier.arx.DataType)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>ImportColumnCSV</h4>
<pre>public ImportColumnCSV(java.lang.String name,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype)</pre>
<div class="block">Creates a new instance of this object with the given parameters.</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>name</code> - the name</dd><dd><code>datatype</code> - the datatype</dd></dl>
</li>
</ul>
<a name="ImportColumnCSV(java.lang.String, org.deidentifier.arx.DataType, boolean)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>ImportColumnCSV</h4>
<pre>public ImportColumnCSV(java.lang.String name,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype,
boolean cleansing)</pre>
<div class="block">Creates a new instance of this object with the given parameters.</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>name</code> - the name</dd><dd><code>datatype</code> - the datatype</dd><dd><code>cleansing</code> - the cleansing</dd></dl>
</li>
</ul>
<a name="ImportColumnCSV(java.lang.String, java.lang.String, org.deidentifier.arx.DataType)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>ImportColumnCSV</h4>
<pre>public ImportColumnCSV(java.lang.String name,
java.lang.String alias,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype)</pre>
<div class="block">Creates a new instance of this object with the given parameters.</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>name</code> - the name</dd><dd><code>alias</code> - the alias</dd><dd><code>datatype</code> - the datatype</dd></dl>
</li>
</ul>
<a name="ImportColumnCSV(java.lang.String, java.lang.String, org.deidentifier.arx.DataType, boolean)">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>ImportColumnCSV</h4>
<pre>public ImportColumnCSV(java.lang.String name,
java.lang.String alias,
<a href="../../../../org/deidentifier/arx/DataType.html" title="class in org.deidentifier.arx">DataType</a><?> datatype,
boolean cleansing)</pre>
<div class="block">Creates a new instance of this object with the given parameters.</div>
<dl><dt><span class="strong">Parameters:</span></dt><dd><code>name</code> - the name</dd><dd><code>alias</code> - the alias</dd><dd><code>datatype</code> - the datatype</dd><dd><code>cleansing</code> - the cleansing</dd></dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/ImportColumnCSV.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/deidentifier/arx/io/ImportColumn.html" title="class in org.deidentifier.arx.io"><span class="strong">Prev Class</span></a></li>
<li><a href="../../../../org/deidentifier/arx/io/ImportColumnExcel.html" title="class in org.deidentifier.arx.io"><span class="strong">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/deidentifier/arx/io/ImportColumnCSV.html" target="_top">Frames</a></li>
<li><a href="ImportColumnCSV.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor_summary">Constr</a> | </li>
<li><a href="#methods_inherited_from_class_org.deidentifier.arx.io.ImportColumnIndexed">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor_detail">Constr</a> | </li>
<li>Method</li>
</ul>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
|
kentoa/arx
|
doc/dev/org/deidentifier/arx/io/ImportColumnCSV.html
|
HTML
|
apache-2.0
| 20,420 | 48.424691 | 644 | 0.649804 | false |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
namespace Microsoft.CodeAnalysis.Diagnostics
{
internal static class PredefinedBuildTools
{
public static readonly string Build = FeaturesResources.BuildToolBuild;
public static readonly string EnC = FeaturesResources.BuildToolEnC;
public static readonly string Compiler = FeaturesResources.BuildToolLive;
}
}
|
MavenRain/roslyn
|
src/Features/Core/Diagnostics/PredefinedBuildTools.cs
|
C#
|
apache-2.0
| 511 | 45.272727 | 161 | 0.762279 | false |
# -*- coding: utf-8 -*-
"""
    streamango urlresolver plugin
Copyright (C) 2015 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
from lib import helpers
from urlresolver9 import common
from urlresolver9.resolver import UrlResolver, ResolverError
class StreamangoResolver(UrlResolver):
name = "streamango"
domains = ["streamango.com"]
    pattern = r'(?://|\.)(streamango\.(?:io|com))/(?:embed|f)/([0-9a-zA-Z-_]+)'
def __init__(self):
self.net = common.Net()
def get_media_url(self, host, media_id):
web_url = self.get_url(host, media_id)
html = self.net.http_GET(web_url).content
sources = re.findall('type:"video/mp4",src:"([^"]+)"', html)
if sources:
source = sources[0]
            if 'http' not in source:
source = 'https:' + source
return source + helpers.append_headers({'User-Agent': common.IE_USER_AGENT})
else:
raise ResolverError('File Not Found or removed')
def get_url(self, host, media_id):
return 'http://streamango.com/embed/%s' % media_id
|
mrknow/filmkodi
|
script.mrknow.urlresolver/lib/urlresolver9/plugins/streamango.py
|
Python
|
apache-2.0
| 1,671 | 34.553191 | 88 | 0.672651 | false |
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.web.reactive.error;
import java.util.Collections;
import java.util.Map;
import org.springframework.boot.web.error.ErrorAttributeOptions;
import org.springframework.web.reactive.function.server.ServerRequest;
import org.springframework.web.reactive.function.server.ServerResponse;
import org.springframework.web.server.ServerWebExchange;
/**
* Provides access to error attributes which can be logged or presented to the user.
*
* @author Brian Clozel
* @author Scott Frederick
* @since 2.0.0
* @see DefaultErrorAttributes
*/
public interface ErrorAttributes {
/**
* Return a {@link Map} of the error attributes. The map can be used as the model of
* an error page, or returned as a {@link ServerResponse} body.
* @param request the source request
* @param options options for error attribute contents
* @return a map of error attributes
*/
default Map<String, Object> getErrorAttributes(ServerRequest request, ErrorAttributeOptions options) {
return Collections.emptyMap();
}
/**
* Return the underlying cause of the error or {@code null} if the error cannot be
* extracted.
* @param request the source ServerRequest
* @return the {@link Exception} that caused the error or {@code null}
*/
Throwable getError(ServerRequest request);
/**
* Store the given error information in the current {@link ServerWebExchange}.
* @param error the {@link Exception} that caused the error
* @param exchange the source exchange
*/
void storeErrorInformation(Throwable error, ServerWebExchange exchange);
}
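// --- Illustrative sketch (not part of the original file) ---------------------------
// A minimal ErrorAttributes implementation that keeps the error in an exchange
// attribute and exposes only its message. The class name and attribute key below are
// hypothetical; in practice Spring Boot's own DefaultErrorAttributes is normally used.
class MinimalErrorAttributes implements ErrorAttributes {

	private static final String ERROR_KEY = MinimalErrorAttributes.class.getName() + ".ERROR";

	@Override
	public Map<String, Object> getErrorAttributes(ServerRequest request, ErrorAttributeOptions options) {
		Throwable error = getError(request);
		Map<String, Object> attributes = new java.util.LinkedHashMap<>();
		attributes.put("message", (error != null) ? error.getMessage() : "Unknown error");
		return attributes;
	}

	@Override
	public Throwable getError(ServerRequest request) {
		// Returns null when no error has been stored for this request
		return (Throwable) request.attribute(ERROR_KEY).orElse(null);
	}

	@Override
	public void storeErrorInformation(Throwable error, ServerWebExchange exchange) {
		exchange.getAttributes().putIfAbsent(ERROR_KEY, error);
	}
}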
|
jxblum/spring-boot
|
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/reactive/error/ErrorAttributes.java
|
Java
|
apache-2.0
| 2,198 | 33.888889 | 103 | 0.753867 | false |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.vortex.driver;
import net.jcip.annotations.ThreadSafe;
import org.apache.reef.annotations.audience.DriverSide;
import org.apache.reef.util.Optional;
import org.apache.reef.vortex.api.VortexFunction;
import org.apache.reef.vortex.api.VortexFuture;
import javax.inject.Inject;
import java.io.Serializable;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Default implementation of VortexMaster.
* Uses two thread-safe data structures(pendingTasklets, runningWorkers) in implementing VortexMaster interface.
*/
@ThreadSafe
@DriverSide
final class DefaultVortexMaster implements VortexMaster {
private final AtomicInteger taskletIdCounter = new AtomicInteger();
private final RunningWorkers runningWorkers;
private final PendingTasklets pendingTasklets;
/**
* @param runningWorkers for managing all running workers.
*/
@Inject
DefaultVortexMaster(final RunningWorkers runningWorkers,
final PendingTasklets pendingTasklets) {
this.runningWorkers = runningWorkers;
this.pendingTasklets = pendingTasklets;
}
/**
* Add a new tasklet to pendingTasklets.
*/
@Override
public <TInput extends Serializable, TOutput extends Serializable> VortexFuture<TOutput>
enqueueTasklet(final VortexFunction<TInput, TOutput> function, final TInput input) {
// TODO[REEF-500]: Simple duplicate Vortex Tasklet launch.
final VortexFuture<TOutput> vortexFuture = new VortexFuture<>();
this.pendingTasklets.addLast(new Tasklet<>(taskletIdCounter.getAndIncrement(), function, input, vortexFuture));
return vortexFuture;
}
/**
* Add a new worker to runningWorkers.
*/
@Override
public void workerAllocated(final VortexWorkerManager vortexWorkerManager) {
runningWorkers.addWorker(vortexWorkerManager);
}
/**
* Remove the worker from runningWorkers and add back the lost tasklets to pendingTasklets.
*/
@Override
public void workerPreempted(final String id) {
final Optional<Collection<Tasklet>> preemptedTasklets = runningWorkers.removeWorker(id);
if (preemptedTasklets.isPresent()) {
for (final Tasklet tasklet : preemptedTasklets.get()) {
pendingTasklets.addFirst(tasklet);
}
}
}
/**
* Notify task completion to runningWorkers.
*/
@Override
public void taskletCompleted(final String workerId,
final int taskletId,
final Serializable result) {
runningWorkers.completeTasklet(workerId, taskletId, result);
}
/**
* Notify task failure to runningWorkers.
*/
@Override
public void taskletErrored(final String workerId, final int taskletId, final Exception exception) {
runningWorkers.errorTasklet(workerId, taskletId, exception);
}
/**
* Terminate the job.
*/
@Override
public void terminate() {
runningWorkers.terminate();
}
}
|
swlsw/incubator-reef
|
lang/java/reef-applications/reef-vortex/src/main/java/org/apache/reef/vortex/driver/DefaultVortexMaster.java
|
Java
|
apache-2.0
| 3,731 | 32.612613 | 115 | 0.734388 | false |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.test.functional;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.accumulo.core.client.Accumulo;
import org.apache.accumulo.core.client.AccumuloClient;
import org.apache.accumulo.core.clientImpl.ClientContext;
import org.apache.accumulo.harness.AccumuloClusterHarness;
import org.apache.accumulo.test.TestIngest;
import org.apache.accumulo.test.TestIngest.IngestParams;
import org.apache.hadoop.io.Text;
import org.junit.Test;
/**
* See ACCUMULO-779
*/
public class FateStarvationIT extends AccumuloClusterHarness {
@Override
protected int defaultTimeoutSeconds() {
return 2 * 60;
}
@Test
public void run() throws Exception {
String tableName = getUniqueNames(1)[0];
try (AccumuloClient c = Accumulo.newClient().from(getClientProps()).build()) {
c.tableOperations().create(tableName);
c.tableOperations().addSplits(tableName, TestIngest.getSplitPoints(0, 100000, 50));
IngestParams params = new IngestParams(getClientProps(), tableName, 100_000);
params.random = 89;
params.timestamp = 7;
params.dataSize = 50;
params.cols = 1;
TestIngest.ingest(c, params);
c.tableOperations().flush(tableName, null, null, true);
List<Text> splits = new ArrayList<>(TestIngest.getSplitPoints(0, 100000, 67));
Random rand = new SecureRandom();
for (int i = 0; i < 100; i++) {
int idx1 = rand.nextInt(splits.size() - 1);
int idx2 = rand.nextInt(splits.size() - (idx1 + 1)) + idx1 + 1;
c.tableOperations().compact(tableName, splits.get(idx1), splits.get(idx2), false, false);
}
c.tableOperations().offline(tableName);
FunctionalTestUtils.assertNoDanglingFateLocks((ClientContext) c, getCluster());
}
}
}
|
keith-turner/accumulo
|
test/src/main/java/org/apache/accumulo/test/functional/FateStarvationIT.java
|
Java
|
apache-2.0
| 2,658 | 33.519481 | 97 | 0.719338 | false |
/*
Copyright (c) 2004-2010, The Dojo Foundation All Rights Reserved.
Available via Academic Free License >= 2.1 OR the modified BSD license.
see: http://dojotoolkit.org/license for details
*/
if(!dojo._hasResource["dojox.image._base"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code.
dojo._hasResource["dojox.image._base"] = true;
dojo.provide("dojox.image._base");
// summary: Core Image-related functionality
;(function(d){
var cacheNode;
dojox.image.preload = function(/* Array */urls){
// summary: Preload a list of images in the dom.
//
// urls: Array
// The list of urls to load. Can be any valid .src attribute.
//
// example:
// Load two images into cache:
// | dojox.image.preload(["foo.png", "bar.gif"]);
//
// example:
// Using djConfig:
// | var djConfig = {
// | preloadImages:["bar.png", "baz.png", "http://example.com/icon.gif"]
// | };
//
// returns: Array
// An Array of DomNodes that have been cached.
if(!cacheNode){
cacheNode = d.create("div", {
style:{ position:"absolute", top:"-9999px", height:"1px", overflow:"hidden" }
}, d.body());
}
// place them in the hidden cachenode
return d.map(urls, function(url){
return d.create("img", { src: url }, cacheNode);
});
};
/*=====
dojo.mixin(djConfig, {
// preloadImages: Array?
// An optional array of urls to preload immediately upon
// page load. Uses `dojox.image`, and is unused if not present.
preloadImages: []
})
=====*/
if(d.config.preloadImages){
d.addOnLoad(function(){
dojox.image.preload(d.config.preloadImages);
});
}
// dojo.declare("dojox.image.Image", dijit._Widget, {
// // summary: an Image widget
// //
// // example:
// // | new dojox.Image({ src:"foo.png", id:"bar" });
//
// alt: "",
// src: dojo._blankGif,
// title: "",
//
// onLoad: function(e){
// // summary: Stub fired when this image is really ready.
// },
//
// _onLoad: function(e){
// // summary: private function to normalize `onLoad` for this
// // instance.
// this.onLoad(e);
// },
//
// _setSrcAttr: function(newSrc){
// // summary: Function so widget.attr('src', someUrl) works
//
// var ts = this.domNode, os = td.src;
// if(os !== newSrc){
// td.src = newSrc;
// }
// },
//
// /* Sugar Functions: */
//
// crossFade: function(newSrc){
// // summary: Set this Image to a new src with crossfading
// //
// // example:
// // dijit.byId("bar").crossFade("/images/newImage.png");
// //
//
// d.fadeOut({
// node: this.domNode,
// onEnd: d.hitch(this, function(){
// this.attr('src', newSrc);
// d.fadeIn({
// node: this.domNode,
// delay: 75
// }).play();
// })
// }).play();
// },
//
// /* Overrides */
//
// buildRendering: function(){
// // override buildrendering to create a real "img" instead of a div
// // when no srcNodeRef is passed. also wire up single onload.
// this.domNode = this.srcNodeRef || d.create('img');
// this.connect(this.domNode, "onload", "_onload");
// }
//
// });
})(dojo);
}
|
ozoneplatform/owf-framework
|
web-app/js-lib/dojo-release-1.5.0/dojox/image/_base.js
|
JavaScript
|
apache-2.0
| 3,257 | 24.056 | 129 | 0.564016 | false |
/*
* Copyright 2019-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.intellij.ideabuck.contributor;
import com.facebook.buck.intellij.ideabuck.lang.BcfgFile;
import com.facebook.buck.intellij.ideabuck.lang.BcfgFileType;
import com.facebook.buck.intellij.ideabuck.lang.psi.BcfgProperty;
import com.facebook.buck.intellij.ideabuck.lang.psi.BcfgSection;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiManager;
import com.intellij.psi.search.FileTypeIndex;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.indexing.FileBasedIndex;
import java.util.Objects;
import java.util.stream.Stream;
/**
* One-off utilities for working with {@code .buckconfig} files and elements that don't currently
* have a better home.
*/
public class BcfgUtils {
/** Returns the known buckconfig files in the given {@link Project}. */
public static Stream<BcfgFile> findBcfgFiles(Project project) {
// TODO: Replace this with a method that returns the buckconfig
// files for a given cell, accounting for included config files and
// Buck's rules of precedence, as per:
// https://buckbuild.com/files-and-dirs/buckconfig.html#config-precedence
PsiManager psiManager = PsiManager.getInstance(project);
GlobalSearchScope searchScope = GlobalSearchScope.allScope(project);
return FileBasedIndex.getInstance()
.getContainingFiles(FileTypeIndex.NAME, BcfgFileType.INSTANCE, searchScope)
.stream()
.map(psiManager::findFile)
.map(BcfgFile.class::cast)
.filter(Objects::nonNull);
}
/** Returns all known properties in the given {@link Project}. */
public static Stream<BcfgProperty> findProperties(Project project) {
return findBcfgFiles(project)
.map(psiFile -> PsiTreeUtil.getChildrenOfType(psiFile, BcfgProperty.class))
.filter(Objects::nonNull)
.flatMap(Stream::of);
}
/** Returns all known sections in the given {@link Project}. */
public static Stream<BcfgSection> findSections(Project project) {
return findBcfgFiles(project)
.map(psiFile -> PsiTreeUtil.getChildrenOfType(psiFile, BcfgSection.class))
.filter(Objects::nonNull)
.flatMap(Stream::of);
}
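  // --- Illustrative sketch (not part of the original file) ---------------------------
  // Dumps the raw text of every buckconfig section known to a project. It relies only on
  // PsiElement#getText(), so it assumes nothing about the Bcfg PSI beyond what this class
  // already imports; the method name is hypothetical.
  static void printSections(Project project) {
    findSections(project).forEach(section -> System.out.println(section.getText()));
  }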
}
|
rmaz/buck
|
tools/ideabuck/src/com/facebook/buck/intellij/ideabuck/contributor/BcfgUtils.java
|
Java
|
apache-2.0
| 2,835 | 40.691176 | 97 | 0.743563 | false |
package com.haskforce.parsing.srcExtsDatatypes;
/**
* TyFun l (Type l) (Type l)
*/
public class TyFun extends TypeTopType {
public SrcInfoSpan srcInfoSpan;
public TypeTopType t1;
public TypeTopType t2;
@Override
public String toString() {
return "TyFun{" +
"t1=" + t1 +
", t2=" + t2 +
'}';
}
}
|
charleso/intellij-haskforce
|
src/com/haskforce/parsing/srcExtsDatatypes/TyFun.java
|
Java
|
apache-2.0
| 380 | 20.111111 | 47 | 0.539474 | false |
+++
Talk_date = ""
Talk_start_time = "11:30"
Talk_end_time = "12:00"
Title = "DevOps: Histórias do Front"
Type = "talk"
Speakers = ["zandler-oliveira"]
+++
|
gomex/devopsdays-web
|
content/events/2019-belo-horizonte/program/zandler-oliveira.md
|
Markdown
|
apache-2.0
| 159 | 14.8 | 36 | 0.620253 | false |
import _ from 'lodash';
import $ from 'jquery';
import Handlebars from 'handlebars';
import I18n from './i18n';
import View from './view'
import Storage from './storage'
const MAX_HEIGHT = 375;
function noop() {}
// helper to resolve an event handler for an app
function resolveHandler(app, name) {
let handler = app.events[name];
if (!handler) { return noop; }
return _.isFunction(handler) ? handler.bind(app) : app[handler].bind(app);
}
// Binds DOM events using jQuery and Framework events using `zafClient.on`
function bindEvents(app) {
_.each(app.events, function(fn, key) {
let splittedKey = key.split(' '),
event = splittedKey[0],
element = splittedKey[1],
isDomEvent = !!element,
func = resolveHandler(app, key);
if (isDomEvent) {
$(document).on(event, element, func);
} else {
app.zafClient.on(event, func);
}
}.bind(app));
}
// Defines `setting`, `store` and `spinner` helpers
// See https://developer.zendesk.com/apps/docs/agent/templates#framework-helpers
function registerHelpers(app) {
['setting', 'store'].forEach(function(api) {
Handlebars.registerHelper(api, function(key) {
return app[api](key);
});
});
Handlebars.registerHelper('spinner', function() {
return new Handlebars.SafeString(`<div class="spinner dotted"></div>`);
});
}
function BaseApp(zafClient, data) {
this.zafClient = zafClient;
// Defines I18n (internationalization) API
// See https://developer.zendesk.com/apps/docs/agent/i18n
this.I18n = { t: I18n.t };
registerHelpers(this);
bindEvents(this);
this._metadata = data.metadata;
this._context = data.context;
this._storage = new Storage(this._metadata.installationId);
let view = new View({ afterRender: () => {
// automatically resize the iframe based on document height
let newHeight = Math.min($('html').height(), MAX_HEIGHT);
this.zafClient.invoke('resize', { height: newHeight, width: '100%' });
}});
// Defines `switchTo` API
// See https://developer.zendesk.com/apps/docs/agent/interface#this.switchtotemplatename-data
this.switchTo = view.switchTo.bind(view);
// Defines `renderTemplate` API
// https://developer.zendesk.com/apps/docs/agent/interface#this.rendertemplatetemplatename-data
this.renderTemplate = view.renderTemplate.bind(view);
// Switches to `defaultState` if defined on the prototype
// See https://developer.zendesk.com/apps/docs/agent/templates#switching-templates
if (this.defaultState) {
view.switchTo(this.defaultState);
}
// Trigger initial events
let evt = { firstLoad: true };
resolveHandler(this, 'app.created')();
resolveHandler(this, 'app.activated')(evt, evt);
// Trigger app.willDestroy if the iframe is destroyed
$(window).unload(() => {
resolveHandler(this, 'app.willDestroy')();
});
}
BaseApp.prototype = {
// These are public APIs of the v1 framework that we are shimming to make it
// easier to migrate existing v1 apps. See the respective links for the relevant docs.
// https://developer.zendesk.com/apps/docs/agent/events
events: {},
// https://developer.zendesk.com/apps/docs/agent/requests#define-a-request
requests: {},
// https://developer.zendesk.com/apps/docs/agent/data#id
id: function() {
return this._metadata.appId;
},
// https://developer.zendesk.com/apps/docs/agent/data#installationid
installationId: function() {
return this._metadata.installationId;
},
// https://developer.zendesk.com/apps/docs/agent/data#guid
guid: function() {
return this._context.instanceGuid;
},
// https://developer.zendesk.com/apps/docs/agent/interface#this.currentlocation
currentLocation: function() {
return this._context.location;
},
// https://developer.zendesk.com/apps/docs/agent/requests#make-a-request
ajax: function(name) {
let req = this.requests[name],
options = _.isFunction(req) ? req.apply(this, Array.prototype.slice.call(arguments, 1)) : req,
dfd = $.Deferred(),
app = this;
let alwaysCallback = resolveHandler(this, name + '.always');
let doneCallback = function() {
dfd.resolveWith(app, arguments);
resolveHandler(app, name + '.done').apply(null, arguments);
alwaysCallback.apply(null, arguments);
};
let failCallback = function() {
dfd.rejectWith(app, arguments);
resolveHandler(app, name + '.fail').apply(null, arguments);
alwaysCallback.apply(null, arguments);
};
this.zafClient.request(options).then(doneCallback, failCallback);
return dfd.promise();
},
// https://developer.zendesk.com/apps/docs/agent/promises
promise: function(fn) {
if (!_.isFunction(fn)) { throw new Error('`promise` needs to be passed a Function'); }
var dfd = $.Deferred();
_.defer(fn.bind(this, dfd.resolve.bind(dfd), dfd.reject.bind(dfd)));
return dfd.promise();
},
// https://developer.zendesk.com/apps/docs/agent/promises
when: function() {
return $.when.apply($, arguments);
},
// https://developer.zendesk.com/apps/docs/agent/interface#this.selector
$: function() {
let args = Array.prototype.slice.call(arguments, 0);
if (!args.length) return $('body');
return $.apply($, arguments);
},
// https://developer.zendesk.com/apps/docs/agent/data#setting
setting: function(name) {
return this._metadata.settings[name];
},
// https://developer.zendesk.com/apps/docs/agent/storage#javascript-api
store: function(keyOrObject, value) {
if (arguments.length === 1) {
return this._storage.get(keyOrObject);
}
this._storage.set(keyOrObject, value);
}
}
// helper to create a subclass of BaseApp with the passed prototype
BaseApp.extend = function(appPrototype) {
let App = function(client, data) {
BaseApp.call(this, client, data);
};
App.prototype = _.extend({}, BaseApp.prototype, appPrototype);
return App;
};
export default BaseApp;
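// --- Illustrative sketch (not part of the original file) -----------------------------
// How an app might subclass BaseApp. The template name ('main'), request name
// ('fetchCurrentUser') and endpoint below are hypothetical placeholders.
const ExampleApp = BaseApp.extend({
  defaultState: 'main',

  events: {
    'app.activated': 'onActivated'
  },

  requests: {
    fetchCurrentUser: { url: '/api/v2/users/me.json', type: 'GET' }
  },

  onActivated() {
    // `ajax` resolves the named request and returns a jQuery promise
    this.ajax('fetchCurrentUser')
      .done((user) => this.switchTo('main', user))
      .fail(() => this.switchTo('main', { error: true }));
  }
});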
|
sinanuozdemir/kylie-zendesk-app-v2
|
lib/javascripts/base_app.js
|
JavaScript
|
apache-2.0
| 5,956 | 29.233503 | 102 | 0.676629 | false |
import {MessageType_Enum, deserializeMessage} from '#core/3p-frame-messaging';
import * as Preact from '#preact';
import {useCallback, useEffect, useMemo, useRef, useState} from '#preact';
import {ProxyIframeEmbed} from '#preact/component/3p-frame';
const TYPE = 'github';
const DEFAULT_TITLE = 'Github Gist';
const FULL_HEIGHT = '100%';
/**
* @param {!BentoGist.Props} props
* @return {PreactDef.Renderable}
*/
export function BentoGist({
gistId,
file,
title = DEFAULT_TITLE,
requestResize,
style,
...rest
}) {
const iframeRef = useRef(null);
const [height, setHeight] = useState(null);
const messageHandler = useCallback(
(event) => {
const data = deserializeMessage(event.data);
if (data['type'] == MessageType_Enum.EMBED_SIZE) {
const height = data['height'];
if (requestResize) {
requestResize(height);
setHeight(FULL_HEIGHT);
} else {
setHeight(height);
}
}
},
[requestResize]
);
useEffect(() => {
/** Unmount Procedure */
return () => {
// Release iframe resources
iframeRef.current = null;
};
}, []);
const options = useMemo(
() => ({
gistid: gistId,
file,
}),
[gistId, file]
);
return (
<ProxyIframeEmbed
title={title}
options={options}
ref={iframeRef}
type={TYPE}
messageHandler={messageHandler}
style={height ? {...style, height} : style}
{...rest}
/>
);
}
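// --- Illustrative sketch (not part of the original file) -----------------------------
// Rendering the component. The gist id and file name are hypothetical placeholders;
// `style` supplies an initial size until the embed reports its own height.
function ExampleGistEmbed() {
  return (
    <BentoGist
      gistId="YOUR_GIST_ID"
      file="hello.py"
      style={{width: '100%', height: '400px'}}
    />
  );
}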
|
alanorozco/amphtml
|
extensions/amp-gist/1.0/component.js
|
JavaScript
|
apache-2.0
| 1,497 | 21.681818 | 78 | 0.589846 | false |
/*
Copyright IBM Corp. 2016 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package chaincode
import (
"golang.org/x/net/context"
"fmt"
"github.com/hyperledger/fabric/core/util"
pb "github.com/hyperledger/fabric/protos/peer"
)
//create a Transactions - this has to change to Proposal when we move chaincode to use Proposals
func createTx(typ pb.Transaction_Type, ccname string, args [][]byte) (*pb.Transaction, error) {
var tx *pb.Transaction
var err error
uuid := util.GenerateUUID()
spec := &pb.ChaincodeInvocationSpec{ChaincodeSpec: &pb.ChaincodeSpec{Type: 1, ChaincodeID: &pb.ChaincodeID{Name: ccname}, CtorMsg: &pb.ChaincodeInput{Args: args}}}
tx, err = pb.NewChaincodeExecute(spec, uuid, typ)
if nil != err {
return nil, err
}
return tx, nil
}
func GetCDSFromLCCC(ctxt context.Context, chainID string, chaincodeID string) ([]byte, error) {
payload, _, err := ExecuteChaincode(ctxt, pb.Transaction_CHAINCODE_INVOKE, string(DefaultChain), "lccc", [][]byte{[]byte("getdepspec"), []byte(chainID), []byte(chaincodeID)})
return payload, err
}
// ExecuteChaincode executes a given chaincode given chaincode name and arguments
func ExecuteChaincode(ctxt context.Context, typ pb.Transaction_Type, chainname string, ccname string, args [][]byte) ([]byte, *pb.ChaincodeEvent, error) {
var tx *pb.Transaction
var err error
var b []byte
var ccevent *pb.ChaincodeEvent
	tx, err = createTx(typ, ccname, args)
	if err != nil {
		return nil, nil, fmt.Errorf("Error creating transaction: %s", err)
	}
	b, ccevent, err = Execute(ctxt, GetChain(ChainName(chainname)), tx)
	if err != nil {
		return nil, nil, fmt.Errorf("Error executing chaincode: %s", err)
	}
return b, ccevent, err
}
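// exampleQuery is an illustrative sketch (not part of the original file): it shows how a
// caller could query a deployed chaincode through ExecuteChaincode. The chaincode name
// "mycc" and the arguments are hypothetical placeholders.
func exampleQuery(ctxt context.Context) ([]byte, error) {
	payload, _, err := ExecuteChaincode(ctxt, pb.Transaction_CHAINCODE_INVOKE, string(DefaultChain),
		"mycc", [][]byte{[]byte("query"), []byte("a")})
	return payload, err
}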
|
stonejiang208/fabric
|
core/chaincode/chaincodeexec.go
|
GO
|
apache-2.0
| 2,111 | 34.779661 | 175 | 0.746092 | false |
/************************************************************************
AvalonDock
Copyright (C) 2007-2013 Xceed Software Inc.
This program is provided to you under the terms of the New BSD
License (BSD) as published at http://avalondock.codeplex.com/license
For more features, controls, and fast professional support,
pick up AvalonDock in Extended WPF Toolkit Plus at http://xceed.com/wpf_toolkit
Stay informed: follow @datagrid on Twitter or Like facebook.com/datagrids
**********************************************************************/
using System;
namespace Xceed.Wpf.AvalonDock.Themes
{
public class Vs2013LightTheme : Theme
{
public override Uri GetResourceUri()
{
return new Uri(
"/Xceed.Wpf.AvalonDock.Themes.VS2013;component/LightTheme.xaml",
UriKind.Relative);
}
}
}
|
tgjones/AvalonDock.Themes.VS2013
|
Xceed.Wpf.AvalonDock.Themes.VS2013/Vs2013LightTheme.cs
|
C#
|
apache-2.0
| 904 | 29.066667 | 82 | 0.56541 | false |
/* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.etch.bindings.java.transport.fmt.binary;
import java.io.IOException;
import org.apache.etch.bindings.java.msg.ComboValidator;
import org.apache.etch.bindings.java.msg.Field;
import org.apache.etch.bindings.java.msg.Message;
import org.apache.etch.bindings.java.msg.StructValue;
import org.apache.etch.bindings.java.msg.Type;
import org.apache.etch.bindings.java.msg.Validator;
import org.apache.etch.bindings.java.msg.ValueFactory;
import org.apache.etch.bindings.java.support.Validator_int;
import org.apache.etch.bindings.java.support.Validator_object;
import org.apache.etch.bindings.java.support.Validator_string;
import org.apache.etch.bindings.java.transport.ArrayValue;
import org.apache.etch.bindings.java.transport.TaggedDataInput;
import org.apache.etch.bindings.java.transport.fmt.TypeCode;
import org.apache.etch.util.Assertion;
import org.apache.etch.util.FlexBuffer;
/**
* BinaryTaggedDataInput has methods to support reading tagged
* values from an input buffer.
*/
final public class BinaryTaggedDataInput extends BinaryTaggedData
implements TaggedDataInput
{
/**
* Constructs the BinaryTaggedDataInput with a null buffer.
*
* @param vf the value factory for the service.
* @param uri the uri used to construct the transport stack.
*/
public BinaryTaggedDataInput( ValueFactory vf, String uri )
{
super( vf );
// don't have anything to do with uri yet.
}
private FlexBuffer buf;
private int lengthBudget;
/////////////////////////////
// TaggedDataInput methods //
/////////////////////////////
public Message readMessage( FlexBuffer buf ) throws IOException
{
this.buf = buf;
// lengthBudget is how many array elements total are reasonable to
// allocate while parsing this message. the largest value comes as each
// byte of the incoming message turned into an array element. the total
// will always be lower than this as often it takes multiple bytes to
// make a single array element. so, each time we make an array, we
// deduct the specified length from lengthBudget. if the result is
// negative, then either the incoming message is misformed or someone is
// trying to spoof us.
lengthBudget = buf.avail();
try
{
Message msg = startMessage();
readKeysAndValues( msg );
endMessage( msg );
return msg;
}
finally
{
this.buf = null;
lengthBudget = 0;
}
}
private StructValue readStruct() throws IOException
{
StructValue sv = startStruct();
readKeysAndValues( sv );
endStruct( sv );
return sv;
}
private ArrayValue readArray( Validator v ) throws IOException
{
ArrayValue av = startArray();
readValues( av, v );
endArray( av );
return av;
}
private void readKeysAndValues( StructValue sv )
throws IOException
{
Type t = sv.type();
while (true)
{
Field key = readField( t );
if (key == null)
break;
Validator v = t.getValidator( key );
if (v != null)
{
sv.put( key, readValue( v ) );
}
else
{
// read the value but ignore it.
Object obj = readValue( Validator_object.get( 0 ) );
if (false)
sv.put( key, obj );
}
}
}
private void readValues( ArrayValue av, Validator v ) throws IOException
{
Validator ev = v.elementValidator();
while (true)
{
Object value = readValue( ev, true );
if (value == NONE)
break;
av.add( value );
}
}
////////////////////////
// Main input methods //
////////////////////////
private Message startMessage() throws IOException
{
byte version = buf.getByte();
if (version != VERSION)
throw new IOException(
String.format(
"binary tagged data version mismatch: got %d expected %d",
version, VERSION ) );
Type t = readType();
int length = readLength();
return new Message( t, vf, length );
}
private void endMessage( Message msg )
{
// nothing to do, readKeysAndValues swallowed the NONE.
}
private StructValue startStruct() throws IOException
{
Type t = readType();
int length = readLength();
return new StructValue( t, vf, length );
}
private void endStruct( StructValue struct )
{
// nothing to do, readKeysAndValues swallowed the NONE.
}
@SuppressWarnings("deprecation")
private ArrayValue startArray() throws IOException
{
byte type = buf.getByte();
Type customStructType;
if (type == TypeCode.CUSTOM || type == TypeCode.STRUCT)
customStructType = readType();
else
customStructType = null;
int dim = readIntegerValue();
if (dim <= 0 || dim > Validator.MAX_NDIMS)
throw new IllegalArgumentException(
"dim <= 0 || dim > Validator.MAX_NDIMS" );
int length = readLength();
Object array = allocArrayValue( type, customStructType, dim, length );
return new ArrayValue( array, type, customStructType, dim );
}
private void endArray( ArrayValue array )
{
array.compact();
}
private Type readType() throws IOException
{
Object obj = readValue( intOrStrValidator, false );
if (obj instanceof Integer)
{
Integer id = (Integer) obj;
Type type = vf.getType( id );
if (type == null)
type = new Type( id, id.toString() );
return type;
}
Assertion.check( obj instanceof String, "obj instanceof String" );
String name = (String) obj;
Type type = vf.getType( name );
if (type == null)
type = new Type( name );
return type;
}
private Field readField( Type type ) throws IOException
{
Object obj = readValue( intOrStrValidator, true );
if (obj == NONE)
return null;
if (obj instanceof Integer)
{
Integer id = (Integer) obj;
Field field = type.getField( id );
if (field == null)
field = new Field( id, id.toString() );
return field;
}
Assertion.check( obj instanceof String, "obj instanceof String" );
String name = (String) obj;
Field field = type.getField( name );
if (field == null)
field = new Field( name );
return field;
}
private final Validator intOrStrValidator =
new ComboValidator( Validator_int.get( 0 ), Validator_string.get( 0 ) );
private int readLength() throws IOException
{
int length = readIntegerValue();
if (length < 0 || length > lengthBudget)
throw new IllegalArgumentException(
"length < 0 || length > lengthBudget" );
lengthBudget -= length;
return length;
}
private Integer readIntegerValue() throws IOException
{
return (Integer) readValue( intValidator );
}
private final Validator intValidator = Validator_int.get( 0 );
///////////////////////////
// LOCAL UTILITY METHODS //
///////////////////////////
private Object validateValue( Validator v, Object value )
{
// v == null more or less implies that a field is not known
// for a type. thus we don't care about the field value as
// we are going to ignore it. therefore, return null.
if (v == null)
return null;
if (value == null)
return null;
return v.validateValue( value );
}
private Object validateValue( Validator v, boolean noneOk, Object value )
{
if (noneOk && value == NONE)
return value;
return validateValue( v, value );
}
private Object readValue( Validator v ) throws IOException
{
return readValue( v, false );
}
@SuppressWarnings("deprecation")
private Object readValue( Validator v, boolean noneOk ) throws IOException
{
byte type = buf.getByte();
switch (type)
{
case TypeCode.NULL:
return validateValue( v, null );
case TypeCode.NONE:
return validateValue( v, noneOk, NONE );
case TypeCode.BOOLEAN_FALSE:
return validateValue( v, Boolean.FALSE );
case TypeCode.BOOLEAN_TRUE:
return validateValue( v, Boolean.TRUE );
case TypeCode.BYTE:
return validateValue( v, buf.getByte() );
case TypeCode.SHORT:
return validateValue( v, buf.getShort() );
case TypeCode.INT:
return validateValue( v, buf.getInt() );
case TypeCode.LONG:
return validateValue( v, buf.getLong() );
case TypeCode.FLOAT:
return validateValue( v, buf.getFloat() );
case TypeCode.DOUBLE:
return validateValue( v, buf.getDouble() );
case TypeCode.BYTES:
return validateValue( v, readBytes() );
// reserved for future use:
// case TypeCode.BOOLS:
// case TypeCode.SHORTS:
// case TypeCode.INTS:
// case TypeCode.LONGS:
// case TypeCode.FLOATS:
// case TypeCode.DOUBLES:
// throw new UnsupportedOperationException( "unsupported type code "+type );
case TypeCode.ARRAY:
return validateValue( v, fromArrayValue( readArray( v ) ) );
case TypeCode.EMPTY_STRING:
return validateValue( v, "" );
case TypeCode.STRING:
return validateValue( v, new String( readBytes(), vf.getStringEncoding() ) );
case TypeCode.STRUCT:
case TypeCode.CUSTOM:
return validateValue( v, vf.importCustomValue( readStruct() ) );
default:
if (type >= TypeCode.MIN_TINY_INT && type <= TypeCode.MAX_TINY_INT)
return validateValue( v, type );
throw new UnsupportedOperationException( "unsupported type code "+type );
}
}
private byte[] readBytes() throws IOException
{
int length = readLength();
byte[] b = new byte[length];
buf.getFully( b );
return b;
}
}
|
OBIGOGIT/etch
|
binding-java/runtime/src/main/java/org/apache/etch/bindings/java/transport/fmt/binary/BinaryTaggedDataInput.java
|
Java
|
apache-2.0
| 10,059 | 24.022388 | 81 | 0.672333 | false |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi.impl;
import com.intellij.lang.ASTNode;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.PythonDialectsTokenSetProvider;
import com.jetbrains.python.psi.PyExpression;
import com.jetbrains.python.psi.PySliceExpression;
import com.jetbrains.python.psi.PySliceItem;
import com.jetbrains.python.psi.types.PyTupleType;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* @author yole
*/
public class PySliceExpressionImpl extends PyElementImpl implements PySliceExpression {
public PySliceExpressionImpl(ASTNode astNode) {
super(astNode);
}
@Nullable
@Override
public PyType getType(@NotNull TypeEvalContext context, @NotNull TypeEvalContext.Key key) {
final PyType type = context.getType(getOperand());
// TODO: Currently we don't evaluate the static range of the slice, so we have to return a generic tuple type without elements
if (type instanceof PyTupleType) {
return PyBuiltinCache.getInstance(this).getTupleType();
}
return type;
}
@NotNull
@Override
public PyExpression getOperand() {
return childToPsiNotNull(PythonDialectsTokenSetProvider.INSTANCE.getExpressionTokens(), 0);
}
@Nullable
@Override
public PySliceItem getSliceItem() {
return PsiTreeUtil.getChildOfType(this, PySliceItem.class);
}
}
|
IllusionRom-deprecated/android_platform_tools_idea
|
python/src/com/jetbrains/python/psi/impl/PySliceExpressionImpl.java
|
Java
|
apache-2.0
| 2,070 | 33.5 | 130 | 0.769082 | false |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.8"/>
<title>Hardware Locality (hwloc): Components and Plugins: Discovery backends</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td style="padding-left: 0.5em;">
<div id="projectname">Hardware Locality (hwloc)
 <span id="projectnumber">1.10.1</span>
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.8 -->
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li><a href="pages.html"><span>Related Pages</span></a></li>
<li><a href="modules.html"><span>Modules</span></a></li>
<li><a href="annotated.html"><span>Data Structures</span></a></li>
</ul>
</div>
</div><!-- top -->
<div class="header">
<div class="summary">
<a href="#nested-classes">Data Structures</a> |
<a href="#enum-members">Enumerations</a> |
<a href="#func-members">Functions</a> </div>
<div class="headertitle">
<div class="title">Components and Plugins: Discovery backends</div> </div>
</div><!--header-->
<div class="contents">
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="nested-classes"></a>
Data Structures</h2></td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">struct  </td><td class="memItemRight" valign="bottom"><a class="el" href="a00029.html">hwloc_backend</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="enum-members"></a>
Enumerations</h2></td></tr>
<tr class="memitem:ga21513209613570877b6bfa9898106f2a"><td class="memItemLeft" align="right" valign="top">enum  </td><td class="memItemRight" valign="bottom"><a class="el" href="a00099.html#ga21513209613570877b6bfa9898106f2a">hwloc_backend_flag_e</a> { <a class="el" href="a00099.html#gga21513209613570877b6bfa9898106f2aadc96f2cf3bdd5d41e102dfa1e1976b24">HWLOC_BACKEND_FLAG_NEED_LEVELS</a>
}</td></tr>
<tr class="separator:ga21513209613570877b6bfa9898106f2a"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="func-members"></a>
Functions</h2></td></tr>
<tr class="memitem:ga330a0b581de4817d0cf1e7401db22436"><td class="memItemLeft" align="right" valign="top">struct <a class="el" href="a00029.html">hwloc_backend</a> * </td><td class="memItemRight" valign="bottom"><a class="el" href="a00099.html#ga330a0b581de4817d0cf1e7401db22436">hwloc_backend_alloc</a> (struct <a class="el" href="a00033.html">hwloc_disc_component</a> *component)</td></tr>
<tr class="separator:ga330a0b581de4817d0cf1e7401db22436"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:gaa4edf46c5d88eef53b3b8f572d88b9c5"><td class="memItemLeft" align="right" valign="top">int </td><td class="memItemRight" valign="bottom"><a class="el" href="a00099.html#gaa4edf46c5d88eef53b3b8f572d88b9c5">hwloc_backend_enable</a> (struct hwloc_topology *topology, struct <a class="el" href="a00029.html">hwloc_backend</a> *backend)</td></tr>
<tr class="separator:gaa4edf46c5d88eef53b3b8f572d88b9c5"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ga505a5470745bf0b601f4d25a69601411"><td class="memItemLeft" align="right" valign="top">int </td><td class="memItemRight" valign="bottom"><a class="el" href="a00099.html#ga505a5470745bf0b601f4d25a69601411">hwloc_backends_get_obj_cpuset</a> (struct <a class="el" href="a00029.html">hwloc_backend</a> *caller, struct <a class="el" href="a00036.html">hwloc_obj</a> *obj, <a class="el" href="a00096.html#gaa3c2bf4c776d603dcebbb61b0c923d84">hwloc_bitmap_t</a> cpuset)</td></tr>
<tr class="separator:ga505a5470745bf0b601f4d25a69601411"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:gad8ac8bba9ab6b9af423baba0c8337c6b"><td class="memItemLeft" align="right" valign="top">int </td><td class="memItemRight" valign="bottom"><a class="el" href="a00099.html#gad8ac8bba9ab6b9af423baba0c8337c6b">hwloc_backends_notify_new_object</a> (struct <a class="el" href="a00029.html">hwloc_backend</a> *caller, struct <a class="el" href="a00036.html">hwloc_obj</a> *obj)</td></tr>
<tr class="separator:gad8ac8bba9ab6b9af423baba0c8337c6b"><td class="memSeparator" colspan="2"> </td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<h2 class="groupheader">Enumeration Type Documentation</h2>
<a class="anchor" id="ga21513209613570877b6bfa9898106f2a"></a>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">enum <a class="el" href="a00099.html#ga21513209613570877b6bfa9898106f2a">hwloc_backend_flag_e</a></td>
</tr>
</table>
</div><div class="memdoc">
<p>Backend flags. </p>
<table class="fieldtable">
<tr><th colspan="2">Enumerator</th></tr><tr><td class="fieldname"><a class="anchor" id="gga21513209613570877b6bfa9898106f2aadc96f2cf3bdd5d41e102dfa1e1976b24"></a>HWLOC_BACKEND_FLAG_NEED_LEVELS </td><td class="fielddoc">
<p>Levels should be reconnected before this backend discover() is used. </p>
</td></tr>
</table>
</div>
</div>
<h2 class="groupheader">Function Documentation</h2>
<a class="anchor" id="ga330a0b581de4817d0cf1e7401db22436"></a>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">struct <a class="el" href="a00029.html">hwloc_backend</a>* hwloc_backend_alloc </td>
<td>(</td>
<td class="paramtype">struct <a class="el" href="a00033.html">hwloc_disc_component</a> * </td>
<td class="paramname"><em>component</em></td><td>)</td>
<td></td>
</tr>
</table>
</div><div class="memdoc">
<p>Allocate a backend structure, set good default values, initialize backend->component and topology, etc. The caller will then modify whatever needed, and call <a class="el" href="a00099.html#gaa4edf46c5d88eef53b3b8f572d88b9c5" title="Enable a previously allocated and setup backend. ">hwloc_backend_enable()</a>. </p>
</div>
</div>
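<p>A minimal usage sketch in C (illustrative only; the shape of the component instantiate hook, the <code>my_discover</code> callback and the <code>discover</code> field assignment are assumptions rather than definitions from this page):</p>
<pre class="fragment">
/* Sketch: a discovery component allocates and configures its backend.
 * hwloc core is then expected to enable it via hwloc_backend_enable(). */
static int my_discover(struct hwloc_backend *backend)
{
  /* insert objects into backend->topology here */
  return 0;
}

static struct hwloc_backend *
my_component_instantiate(struct hwloc_disc_component *component)
{
  struct hwloc_backend *backend = hwloc_backend_alloc(component);
  if (!backend)
    return NULL;
  backend->discover = my_discover; /* assumed callback field name */
  return backend;
}
</pre>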
<a class="anchor" id="gaa4edf46c5d88eef53b3b8f572d88b9c5"></a>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">int hwloc_backend_enable </td>
<td>(</td>
<td class="paramtype">struct hwloc_topology * </td>
<td class="paramname"><em>topology</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">struct <a class="el" href="a00029.html">hwloc_backend</a> * </td>
<td class="paramname"><em>backend</em> </td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</div><div class="memdoc">
<p>Enable a previously allocated and setup backend. </p>
</div>
</div>
<a class="anchor" id="ga505a5470745bf0b601f4d25a69601411"></a>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">int hwloc_backends_get_obj_cpuset </td>
<td>(</td>
<td class="paramtype">struct <a class="el" href="a00029.html">hwloc_backend</a> * </td>
<td class="paramname"><em>caller</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">struct <a class="el" href="a00036.html">hwloc_obj</a> * </td>
<td class="paramname"><em>obj</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype"><a class="el" href="a00096.html#gaa3c2bf4c776d603dcebbb61b0c923d84">hwloc_bitmap_t</a> </td>
<td class="paramname"><em>cpuset</em> </td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</div><div class="memdoc">
<p>Used by backends discovery callbacks to request locality information from others. </p>
<p>Traverse the list of enabled backends until one has a get_obj_cpuset() method, and call it. </p>
</div>
</div>
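<p>For example, a backend's discovery callback might ask the other enabled backends where an object it just found is attached (hypothetical sketch; <code>backend</code> and <code>obj</code> stand for the caller and the object in question, and only <code>hwloc_bitmap_alloc()</code> from the bitmap API is assumed beyond the parameters documented above):</p>
<pre class="fragment">
hwloc_bitmap_t cpuset = hwloc_bitmap_alloc();
int err = hwloc_backends_get_obj_cpuset(backend, obj, cpuset);
/* a negative return meaning "no backend answered" is assumed, not stated on this page */
</pre>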
<a class="anchor" id="gad8ac8bba9ab6b9af423baba0c8337c6b"></a>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">int hwloc_backends_notify_new_object </td>
<td>(</td>
<td class="paramtype">struct <a class="el" href="a00029.html">hwloc_backend</a> * </td>
<td class="paramname"><em>caller</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">struct <a class="el" href="a00036.html">hwloc_obj</a> * </td>
<td class="paramname"><em>obj</em> </td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</div><div class="memdoc">
<p>Used by backends discovery callbacks to notify other backends of new objects. </p>
<p>Traverse the list of enabled backends (all but caller) and invoke their notify_new_object() method to notify them that a new object just got added to the topology.</p>
<p>Currently only used for notifying of new PCI device objects. </p>
</div>
</div>
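<p>Conversely, a backend that has just inserted a new object (currently a PCI device, per the note above) lets the other backends annotate it; a one-line sketch:</p>
<pre class="fragment">
/* "obj" was just added to the topology by this backend */
hwloc_backends_notify_new_object(backend, obj);
</pre>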
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated on Mon Jan 26 2015 10:38:04 for Hardware Locality (hwloc) by  <a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.8
</small></address>
</body>
</html>
|
hildeth/chapel
|
third-party/hwloc/hwloc-1.10.1/doc/doxygen-doc/html/a00099.html
|
HTML
|
apache-2.0
| 10,530 | 48.669811 | 495 | 0.649668 | false |
/*
* ******************************************************************************
* Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
// This code is auto-generated, do not modify
using Ds3.Models;
using System;
using System.Net;
namespace Ds3.Calls
{
public class PutJobCompletedNotificationRegistrationSpectraS3Request : Ds3Request
{
public string NotificationEndPoint { get; private set; }
private HttpResponseFormatType? _format;
public HttpResponseFormatType? Format
{
get { return _format; }
set { WithFormat(value); }
}
private string _jobId;
public string JobId
{
get { return _jobId; }
set { WithJobId(value); }
}
private NamingConventionType? _namingConvention;
public NamingConventionType? NamingConvention
{
get { return _namingConvention; }
set { WithNamingConvention(value); }
}
private RequestType? _notificationHttpMethod;
public RequestType? NotificationHttpMethod
{
get { return _notificationHttpMethod; }
set { WithNotificationHttpMethod(value); }
}
public PutJobCompletedNotificationRegistrationSpectraS3Request WithFormat(HttpResponseFormatType? format)
{
this._format = format;
if (format != null)
{
this.QueryParams.Add("format", format.ToString());
}
else
{
this.QueryParams.Remove("format");
}
return this;
}
public PutJobCompletedNotificationRegistrationSpectraS3Request WithJobId(Guid? jobId)
{
this._jobId = jobId.ToString();
if (jobId != null)
{
this.QueryParams.Add("job_id", jobId.ToString());
}
else
{
this.QueryParams.Remove("job_id");
}
return this;
}
public PutJobCompletedNotificationRegistrationSpectraS3Request WithJobId(string jobId)
{
this._jobId = jobId;
if (jobId != null)
{
this.QueryParams.Add("job_id", jobId);
}
else
{
this.QueryParams.Remove("job_id");
}
return this;
}
public PutJobCompletedNotificationRegistrationSpectraS3Request WithNamingConvention(NamingConventionType? namingConvention)
{
this._namingConvention = namingConvention;
if (namingConvention != null)
{
this.QueryParams.Add("naming_convention", namingConvention.ToString());
}
else
{
this.QueryParams.Remove("naming_convention");
}
return this;
}
public PutJobCompletedNotificationRegistrationSpectraS3Request WithNotificationHttpMethod(RequestType? notificationHttpMethod)
{
this._notificationHttpMethod = notificationHttpMethod;
if (notificationHttpMethod != null)
{
this.QueryParams.Add("notification_http_method", notificationHttpMethod.ToString());
}
else
{
this.QueryParams.Remove("notification_http_method");
}
return this;
}
public PutJobCompletedNotificationRegistrationSpectraS3Request(string notificationEndPoint)
{
this.NotificationEndPoint = notificationEndPoint;
this.QueryParams.Add("notification_end_point", notificationEndPoint);
}
internal override HttpVerb Verb
{
get
{
return HttpVerb.POST;
}
}
internal override string Path
{
get
{
return "/_rest_/job_completed_notification_registration";
}
}
}
}
|
RachelTucker/ds3_net_sdk
|
Ds3/Calls/PutJobCompletedNotificationRegistrationSpectraS3Request.cs
|
C#
|
apache-2.0
| 4,771 | 29.012579 | 134 | 0.541396 | false |
using Newtonsoft.Json;
namespace Rabbit.WeiXin.MP.Api.Model
{
/// <summary>
    /// Represents a JSON error result value.
/// </summary>
public sealed class JsonResultError
{
/// <summary>
        /// Error code.
/// </summary>
[JsonProperty("errcode")]
public int ErrorCode { get; set; }
/// <summary>
/// 错误信息。
/// </summary>
[JsonProperty("errmsg")]
public string ErrorMessage { get; set; }
}
}
|
RabbitTeam/WeiXinSDK
|
src/Rabbit.WeiXin/MP/Api/Model/JsonResultError.cs
|
C#
|
apache-2.0
| 502 | 20.045455 | 48 | 0.508658 | false |
###############################################################################
##
## Copyright (C) 2013-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from __future__ import absolute_import
from autobahn.wamp import error
class Error(RuntimeError):
"""
Base class for all exceptions related to WAMP.
"""
def __init__(self, reason):
"""
Constructor.
:param reason: Description of WAMP error that occurred (for logging purposes).
:type reason: str
"""
RuntimeError.__init__(self, reason)
class SessionNotReady(Error):
"""
"""
class SerializationError(Error):
"""
Exception raised when the WAMP serializer could not serialize the
application payload (args or kwargs for `CALL`, `PUBLISH`, etc).
"""
class ProtocolError(Error):
"""
Exception raised when WAMP protocol was violated. Protocol errors
are fatal and are handled by the WAMP implementation. They are
not supposed to be handled at the application level.
"""
class TransportLost(Error):
"""
Exception raised when transport was lost or is not connected.
"""
def __init__(self):
Error.__init__(self, "WAMP transport lost")
class ApplicationError(Error):
"""
Base class for all exceptions that can/may be handled
at the application level.
"""
NOT_AUTHORIZED = "wamp.error.not_authorized"
INVALID_ARGUMENT = "wamp.error.invalid_argument"
INVALID_URI = "wamp.error.invalid_uri"
DISCLOSE_ME_NOT_ALLOWED = "wamp.error.disclose_me.not_allowed"
PROCEDURE_ALREADY_EXISTS = "wamp.error.procedure_already_exists"
NO_SUCH_REALM = "wamp.error.no_such_realm"
SYSTEM_SHUTDOWN = "wamp.error.system_shutdown"
CLOSE_REALM = "wamp.error.close_realm"
GOODBYE_AND_OUT = "wamp.error.goodbye_and_out"
NO_SUCH_REGISTRATION = "wamp.error.no_such_registration"
NO_SUCH_SUBSCRIPTION = "wamp.error.no_such_subscription"
NO_SUCH_PROCEDURE = "wamp.error.no_such_procedure"
CANCELED = "wamp.error.canceled"
def __init__(self, error, *args, **kwargs):
"""
Constructor.
:param error: The URI of the error that occurred, e.g. `wamp.error.not_authorized`.
:type error: str
"""
Exception.__init__(self, *args)
self.kwargs = kwargs
self.error = error
def __str__(self):
return "ApplicationError({})".format(self.error)
#class GenericException(Exception)
@error("wamp.error.not_authorized")
class NotAuthorized(Exception):
"""
Not authorized to perform the respective action.
"""
@error("wamp.error.invalid_topic")
class InvalidTopic(Exception):
"""
The topic to publish or subscribe to is invalid.
"""
class CallError(ApplicationError):
"""
Remote procedure call errors.
"""
def __init__(self, error, problem):
"""
Constructor.
:param error: The URI of the error that occurred, e.g. "com.myapp.error.no_such_user".
:type error: str
:param problem: Any application-level details for the error that occurred.
:type problem: obj
"""
ApplicationError.__init__(self, error)
self.problem = problem
class CanceledError(ApplicationError):
"""
Error for canceled calls.
"""
def __init__(self):
"""
Constructor.
"""
ApplicationError.__init__(self, ApplicationError.CANCELED)
|
dbergan/AutobahnPython
|
autobahn/autobahn/wamp/exception.py
|
Python
|
apache-2.0
| 4,106 | 26.013158 | 92 | 0.622017 | false |
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
/**
* The "inspectTemplates" collection of methods.
* Typical usage is:
* <code>
* $dlpService = new Google_Service_DLP(...);
* $inspectTemplates = $dlpService->inspectTemplates;
* </code>
*/
class Google_Service_DLP_Resource_ProjectsInspectTemplates extends Google_Service_Resource
{
/**
* Creates an InspectTemplate for re-using frequently used configuration for
* inspecting content, images, and storage. (inspectTemplates.create)
*
* @param string $parent The parent resource name, for example projects/my-
* project-id or organizations/my-org-id.
* @param Google_Service_DLP_GooglePrivacyDlpV2CreateInspectTemplateRequest $postBody
* @param array $optParams Optional parameters.
* @return Google_Service_DLP_GooglePrivacyDlpV2InspectTemplate
*/
public function create($parent, Google_Service_DLP_GooglePrivacyDlpV2CreateInspectTemplateRequest $postBody, $optParams = array())
{
$params = array('parent' => $parent, 'postBody' => $postBody);
$params = array_merge($params, $optParams);
return $this->call('create', array($params), "Google_Service_DLP_GooglePrivacyDlpV2InspectTemplate");
}
/**
* Deletes an InspectTemplate. (inspectTemplates.delete)
*
* @param string $name Resource name of the organization and inspectTemplate to
* be deleted, for example `organizations/433245324/inspectTemplates/432452342`
* or projects/project-id/inspectTemplates/432452342.
* @param array $optParams Optional parameters.
* @return Google_Service_DLP_GoogleProtobufEmpty
*/
public function delete($name, $optParams = array())
{
$params = array('name' => $name);
$params = array_merge($params, $optParams);
return $this->call('delete', array($params), "Google_Service_DLP_GoogleProtobufEmpty");
}
/**
* Gets an InspectTemplate. (inspectTemplates.get)
*
* @param string $name Resource name of the organization and inspectTemplate to
* be read, for example `organizations/433245324/inspectTemplates/432452342` or
* projects/project-id/inspectTemplates/432452342.
* @param array $optParams Optional parameters.
* @return Google_Service_DLP_GooglePrivacyDlpV2InspectTemplate
*/
public function get($name, $optParams = array())
{
$params = array('name' => $name);
$params = array_merge($params, $optParams);
return $this->call('get', array($params), "Google_Service_DLP_GooglePrivacyDlpV2InspectTemplate");
}
/**
* Lists InspectTemplates. (inspectTemplates.listProjectsInspectTemplates)
*
* @param string $parent The parent resource name, for example projects/my-
* project-id or organizations/my-org-id.
* @param array $optParams Optional parameters.
*
* @opt_param string pageToken Optional page token to continue retrieval. Comes
* from previous call to `ListInspectTemplates`.
* @opt_param int pageSize Optional size of the page, can be limited by server.
* If zero server returns a page of max size 100.
* @return Google_Service_DLP_GooglePrivacyDlpV2ListInspectTemplatesResponse
*/
public function listProjectsInspectTemplates($parent, $optParams = array())
{
$params = array('parent' => $parent);
$params = array_merge($params, $optParams);
return $this->call('list', array($params), "Google_Service_DLP_GooglePrivacyDlpV2ListInspectTemplatesResponse");
}
/**
* Updates the InspectTemplate. (inspectTemplates.patch)
*
* @param string $name Resource name of organization and inspectTemplate to be
* updated, for example `organizations/433245324/inspectTemplates/432452342` or
* projects/project-id/inspectTemplates/432452342.
* @param Google_Service_DLP_GooglePrivacyDlpV2UpdateInspectTemplateRequest $postBody
* @param array $optParams Optional parameters.
* @return Google_Service_DLP_GooglePrivacyDlpV2InspectTemplate
*/
public function patch($name, Google_Service_DLP_GooglePrivacyDlpV2UpdateInspectTemplateRequest $postBody, $optParams = array())
{
$params = array('name' => $name, 'postBody' => $postBody);
$params = array_merge($params, $optParams);
return $this->call('patch', array($params), "Google_Service_DLP_GooglePrivacyDlpV2InspectTemplate");
}
}
|
drthomas21/WordPress_Tutorial
|
wordpress_htdocs/wp-content/plugins/swg-youtube-vids/vendor/google/apiclient-services/src/Google/Service/DLP/Resource/ProjectsInspectTemplates.php
|
PHP
|
apache-2.0
| 4,803 | 43.06422 | 132 | 0.729752 | false |
# frozen_string_literal: true
require 'aws-sdk-autoscaling'
require 'aws-sdk-ec2'
require 'with_retries'
# Shared support code for AWS-based operations
#
module AwsSupport
def self.sorted_auto_scaling_instances(aws_autoscaling_group_id, aws_region)
aws = Aws::AutoScaling::Client.new(region: aws_region)
resp = ''
Retriable.with_retries(Aws::EC2::Errors::RequestLimitExceeded, limit: 5, sleep: 10) do
resp = aws.describe_auto_scaling_groups(auto_scaling_group_names: [
aws_autoscaling_group_id.to_s
])
end
resp.auto_scaling_groups[0].instances.map(&:instance_id).sort
end
def self.preferred_instance_ip_address(instance_id, aws_region)
aws = Aws::EC2::Client.new(region: aws_region)
resp = ''
Retriable.with_retries(Aws::EC2::Errors::RequestLimitExceeded, limit: 5, sleep: 10) do
resp = aws.describe_instances(instance_ids: [instance_id.to_s])
end
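    # Use the interface's public IP when it has an association; otherwise fall back to the private IP.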
ssh_master_ip = if resp.reservations[0].instances[0].network_interfaces[0].association.nil?
resp.reservations[0].instances[0].network_interfaces[0].private_ip_address
else
resp.reservations[0].instances[0].network_interfaces[0].association.public_ip
end
ssh_master_ip
end
def self.create_aws_key_pairs(aws_region)
client = Aws::EC2::Client.new(region: aws_region)
key_pair_name = NameGenerator.generate_short_name
dir_home = `echo ${HOME}`.chomp
ssh_pub_key = File.read("#{dir_home}/.ssh/id_rsa.pub")
resp = ''
Retriable.with_retries(Aws::EC2::Errors::RequestLimitExceeded, limit: 5, sleep: 10) do
resp = client.import_key_pair(dry_run: false,
key_name: key_pair_name,
public_key_material: ssh_pub_key)
end
resp.key_name
end
def self.delete_aws_key_pairs(key_pair_name, aws_region)
client = Aws::EC2::Client.new(region: aws_region)
Retriable.with_retries(Aws::EC2::Errors::RequestLimitExceeded, limit: 3, sleep: 10) do
client.delete_key_pair(key_name: key_pair_name,
dry_run: false)
end
end
end
|
alexsomesan/tectonic-installer
|
tests/rspec/lib/aws_support.rb
|
Ruby
|
apache-2.0
| 2,258 | 37.271186 | 99 | 0.620461 | false |
////////////////////////////////////////////////////////////////////////////////
/// @brief abstract actions
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2014 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Dr. Frank Celler
/// @author Copyright 2014, ArangoDB GmbH, Cologne, Germany
/// @author Copyright 2012-2014, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
#ifndef ARANGODB_ACTIONS_ACTIONS_H
#define ARANGODB_ACTIONS_ACTIONS_H 1
#include "Basics/Common.h"
#include "Basics/Mutex.h"
// -----------------------------------------------------------------------------
// --SECTION-- forward declarations
// -----------------------------------------------------------------------------
struct TRI_vocbase_s;
namespace triagens {
namespace rest {
class HttpResponse;
class HttpRequest;
}
}
// -----------------------------------------------------------------------------
// --SECTION-- public types
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @brief action result
////////////////////////////////////////////////////////////////////////////////
class TRI_action_result_t {
public:
TRI_action_result_t ()
: isValid(false),
requeue(false),
canceled(false),
response(nullptr),
sleep(0.0) {
}
// Please be careful here: In the beginning we had "bool requeue" after
// the response pointer in this struct. However, this triggered a nasty
// compiler bug in Visual Studio Express 2013 which lead to the fact
// that sometimes requeue was involuntarily flipped to "true" during
// a return of a TRI_action_result_t from a function call.
// In this order it seems to work.
// Details: v8-actions.cpp: v8_action_t::execute returns a TRI_action_result_t
// to RestActionHandler::executeAction and suddenly requeue is true.
bool isValid;
bool requeue;
bool canceled;
triagens::rest::HttpResponse* response;
double sleep;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief action descriptor
////////////////////////////////////////////////////////////////////////////////
class TRI_action_t {
public:
TRI_action_t ()
: _type(),
_url(),
_urlParts(0),
_isPrefix(false),
_allowUseDatabase(false) {
}
virtual ~TRI_action_t () {}
virtual TRI_action_result_t execute (struct TRI_vocbase_s*,
triagens::rest::HttpRequest*,
triagens::basics::Mutex* dataLock,
void** data) = 0;
virtual bool cancel (triagens::basics::Mutex* dataLock,
void** data) = 0;
std::string _type;
std::string _url;
size_t _urlParts;
bool _isPrefix;
bool _allowUseDatabase;
};
// -----------------------------------------------------------------------------
// --SECTION-- public types
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @brief defines an action
////////////////////////////////////////////////////////////////////////////////
TRI_action_t* TRI_DefineActionVocBase (std::string const& name, TRI_action_t* action);
////////////////////////////////////////////////////////////////////////////////
/// @brief looks up an action
////////////////////////////////////////////////////////////////////////////////
TRI_action_t* TRI_LookupActionVocBase (triagens::rest::HttpRequest* request);
////////////////////////////////////////////////////////////////////////////////
/// @brief deletes all defined actions
////////////////////////////////////////////////////////////////////////////////
void TRI_CleanupActions ();
#endif
// -----------------------------------------------------------------------------
// --SECTION-- END-OF-FILE
// -----------------------------------------------------------------------------
// Local Variables:
// mode: outline-minor
// outline-regexp: "/// @brief\\|/// {@inheritDoc}\\|/// @page\\|// --SECTION--\\|/// @\\}"
// End:
|
morsdatum/ArangoDB
|
arangod/Actions/actions.h
|
C
|
apache-2.0
| 5,237 | 34.147651 | 91 | 0.417033 | false |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.server.options;
import java.util.HashSet;
import java.util.Set;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.server.options.OptionValue.Kind;
import org.apache.drill.exec.server.options.OptionValue.OptionType;
import static com.google.common.base.Preconditions.checkArgument;
public class TypeValidators {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TypeValidators.class);
public static class PositiveLongValidator extends LongValidator {
private final long max;
public PositiveLongValidator(String name, long max, long def) {
super(name, def);
this.max = max;
}
@Override
public void validate(final OptionValue v, final OptionManager manager) {
super.validate(v, manager);
if (v.num_val > max || v.num_val < 1) {
throw UserException.validationError()
.message(String.format("Option %s must be between %d and %d.", getOptionName(), 1, max))
.build(logger);
}
}
}
public static class PowerOfTwoLongValidator extends PositiveLongValidator {
public PowerOfTwoLongValidator(String name, long max, long def) {
super(name, max, def);
}
@Override
public void validate(final OptionValue v, final OptionManager manager) {
super.validate(v, manager);
if (!isPowerOfTwo(v.num_val)) {
throw UserException.validationError()
.message(String.format("Option %s must be a power of two.", getOptionName()))
.build(logger);
}
}
private static boolean isPowerOfTwo(long num) {
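      // Bit trick: a power of two has exactly one bit set, so num & (num - 1) clears it to zero.
      // Zero would also pass this check, but PositiveLongValidator already rejects values below 1.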
return (num & (num - 1)) == 0;
}
}
public static class RangeDoubleValidator extends DoubleValidator {
private final double min;
private final double max;
public RangeDoubleValidator(String name, double min, double max, double def) {
super(name, def);
this.min = min;
this.max = max;
}
@Override
public void validate(final OptionValue v, final OptionManager manager) {
super.validate(v, manager);
if (v.float_val > max || v.float_val < min) {
throw UserException.validationError()
.message(String.format("Option %s must be between %f and %f.", getOptionName(), min, max))
.build(logger);
}
}
}
public static class MinRangeDoubleValidator extends RangeDoubleValidator {
private final double min;
private final double max;
private final String maxValidatorName;
public MinRangeDoubleValidator(String name, double min, double max, double def, String maxValidatorName) {
super(name, min, max, def);
this.min = min;
this.max = max;
this.maxValidatorName = maxValidatorName;
}
@Override
public void validate(final OptionValue v, final OptionManager manager) {
super.validate(v, manager);
OptionValue maxValue = manager.getOption(maxValidatorName);
if (v.float_val > maxValue.float_val) {
throw UserException.validationError()
.message(String.format("Option %s must be less than or equal to Option %s",
getOptionName(), maxValidatorName))
.build(logger);
}
}
}
public static class MaxRangeDoubleValidator extends RangeDoubleValidator {
private final double min;
private final double max;
private final String minValidatorName;
public MaxRangeDoubleValidator(String name, double min, double max, double def, String minValidatorName) {
super(name, min, max, def);
this.min = min;
this.max = max;
this.minValidatorName = minValidatorName;
}
@Override
public void validate(final OptionValue v, final OptionManager manager) {
super.validate(v, manager);
OptionValue minValue = manager.getOption(minValidatorName);
if (v.float_val < minValue.float_val) {
throw UserException.validationError()
.message(String.format("Option %s must be greater than or equal to Option %s",
getOptionName(), minValidatorName))
.build(logger);
}
}
}
public static class BooleanValidator extends TypeValidator {
public BooleanValidator(String name, boolean def) {
super(name, Kind.BOOLEAN, OptionValue.createBoolean(OptionType.SYSTEM, name, def));
}
}
public static class StringValidator extends TypeValidator {
public StringValidator(String name, String def) {
super(name, Kind.STRING, OptionValue.createString(OptionType.SYSTEM, name, def));
}
}
public static class LongValidator extends TypeValidator {
public LongValidator(String name, long def) {
super(name, Kind.LONG, OptionValue.createLong(OptionType.SYSTEM, name, def));
}
}
public static class DoubleValidator extends TypeValidator {
public DoubleValidator(String name, double def) {
super(name, Kind.DOUBLE, OptionValue.createDouble(OptionType.SYSTEM, name, def));
}
}
public static class RangeLongValidator extends LongValidator {
private final long min;
private final long max;
public RangeLongValidator(String name, long min, long max, long def) {
super(name, def);
this.min = min;
this.max = max;
}
@Override
public void validate(final OptionValue v, final OptionManager manager) {
super.validate(v, manager);
if (v.num_val > max || v.num_val < min) {
throw UserException.validationError()
.message(String.format("Option %s must be between %d and %d.", getOptionName(), min, max))
.build(logger);
}
}
}
public static class AdminOptionValidator extends StringValidator {
public AdminOptionValidator(String name, String def) {
super(name, def);
}
@Override
public void validate(final OptionValue v, final OptionManager manager) {
if (v.type != OptionType.SYSTEM) {
throw UserException.validationError()
.message("Admin related settings can only be set at SYSTEM level scope. Given scope '%s'.", v.type)
.build(logger);
}
super.validate(v, manager);
}
}
/**
* Validator that checks if the given value is included in a list of acceptable values. Case insensitive.
*/
public static class EnumeratedStringValidator extends StringValidator {
private final Set<String> valuesSet = new HashSet<>();
public EnumeratedStringValidator(String name, String def, String... values) {
super(name, def);
for (String value : values) {
valuesSet.add(value.toLowerCase());
}
}
@Override
public void validate(final OptionValue v, final OptionManager manager) {
super.validate(v, manager);
if (!valuesSet.contains(v.string_val.toLowerCase())) {
throw UserException.validationError()
.message(String.format("Option %s must be one of: %s.", getOptionName(), valuesSet))
.build(logger);
}
}
}
public static abstract class TypeValidator extends OptionValidator {
private final Kind kind;
private final OptionValue defaultValue;
public TypeValidator(final String name, final Kind kind, final OptionValue defValue) {
super(name);
checkArgument(defValue.type == OptionType.SYSTEM, "Default value must be SYSTEM type.");
this.kind = kind;
this.defaultValue = defValue;
}
@Override
public OptionValue getDefault() {
return defaultValue;
}
@Override
public void validate(final OptionValue v, final OptionManager manager) {
if (v.kind != kind) {
throw UserException.validationError()
.message(String.format("Option %s must be of type %s but you tried to set to %s.", getOptionName(),
kind.name(), v.kind.name()))
.build(logger);
}
}
}
}
|
myroch/drill
|
exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java
|
Java
|
apache-2.0
| 8,714 | 33.442688 | 111 | 0.673629 | false |
using Google.GData.Photos;
using NUnit.Framework;
using Google.GData.Client.UnitTests;
using System;
namespace Google.GData.Client.UnitTests.Picasa
{
/// <summary>
///This is a test class for GPhotoBytesUsedTest and is intended
///to contain all GPhotoBytesUsedTest Unit Tests
///</summary>
[TestFixture][Category("Picasa")]
public class GPhotoBytesUsedTest
{
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
///A test for GPhotoBytesUsed Constructor
///</summary>
[Test]
public void GPhotoBytesUsedConstructorTest1()
{
GPhotoBytesUsed target = new GPhotoBytesUsed();
Assert.IsNotNull(target);
Assert.IsTrue(String.IsNullOrEmpty(target.Value));
}
/// <summary>
///A test for GPhotoBytesUsed Constructor
///</summary>
[Test]
public void GPhotoBytesUsedConstructorTest()
{
string initValue = "TestValue";
GPhotoBytesUsed target = new GPhotoBytesUsed(initValue);
Assert.AreEqual(initValue, target.Value);
}
}
}
|
michael-jia-sage/libgoogle
|
src/unittests/picasa/GPhotoBytesUsedTest.cs
|
C#
|
apache-2.0
| 2,564 | 26.466667 | 84 | 0.541764 | false |
/**
* @license
* Copyright 2020 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
foam.CLASS({
package: 'foam.nanos.fs.fileDropZone',
name: 'FileDropZone',
extends: 'foam.u2.Controller',
documentation: 'A default zone to drag & drop files',
requires: [
'foam.log.LogLevel',
'foam.blob.BlobBlob',
'foam.nanos.fs.File',
'foam.nanos.fs.FileArray'
],
imports: [
'ctrl',
'user',
'fileTypeDAO'
],
exports: [
'allowRemoval',
'removeFile',
'highlight'
],
css: `
^ {
box-sizing: border-box;
max-width: 365px;
padding: 16px;
border: 2px dashed #8e9090;
border-radius: 3px;
box-shadow: inset 0 1px 2px 0 rgba(116, 122, 130, 0.21);
}
^instruction-container {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
text-align: center;
height: 228px;
}
^instruction-container.selection {
height: 172px;
margin-bottom: 16px;
}
^input {
display: none;
}
^title {
font-size: 16px;
font-weight: 900;
margin: 0;
}
^or {
display: inline-block;
vertical-align: bottom;
margin: 0;
margin-top: 8px;
}
^link {
display: inline-block;
cursor: pointer;
color: /*%PRIMARY3%*/ #406dea;
margin: 0;
margin-left: 5px;
}
^caption-container {
margin-top: 24px;
position: relative;
top: 60px;
}
^caption {
display: inline-block;
font-size: 10px;
color: #525455;
margin: 0;
margin-top: 4px;
}
`,
messages: [
{ name: 'LABEL_DEFAULT_TITLE', message: 'Drag your file here' },
{ name: 'LABEL_OR', message: 'or' },
{ name: 'LABEL_BROWSE', message: 'select from your device' },
{ name: 'LABEL_SUPPORTED', message: 'Supported file types:' },
{ name: 'LABEL_MAX_SIZE', message: 'Max size:' },
{ name: 'ERROR_FILE_TYPE', message: 'Invalid file type' },
{ name: 'ERROR_FILE_SIZE', message: 'File size exceeds 15MB' }
],
properties: [
{
class: 'String',
name: 'title'
},
{
name: 'supportedFormats',
documentation: `Please use the following format: { 'image/jpg' : 'JPG' }`,
value: {}
},
{
class: 'Boolean',
name: 'isMultipleFiles',
value: true
},
{
class: 'Boolean',
name: 'allowRemoval',
value: true
},
{
class: 'foam.nanos.fs.FileArray',
name: 'files',
factory: function() {
return [];
}
},
{
class: 'Long',
name: 'maxSize',
value: 15,
documentation: 'Dictates maximum file size in MB (Megabyte).'
},
{
name: 'onFilesChanged',
class: 'Function',
documentation: 'When a file has been selected/changed/removed, this function will be called. (OPTIONAL)'
},
{
name: 'selected'
}
],
methods: [
async function initE() {
this.SUPER();
var self = this;
if ( Object.keys(this.supportedFormats).length == 0 ) {
        let s = await this.fileTypeDAO.select();
        s.array.forEach(type => {
          this.supportedFormats[type.toSummary()] = type.abbreviation;
        });
}
this
.addClass(this.myClass())
.start().addClass(this.myClass('instruction-container')).enableClass('selection', this.files$.map((v) => { return v.length > 0; }))
.start('p').addClass(this.myClass('title')).add(this.title || this.LABEL_DEFAULT_TITLE).end()
.start().addClass(this.myClass('browse-container'))
.start('p').addClass(this.myClass('or')).add(this.LABEL_OR).end()
.start('p').addClass(this.myClass('link'))
.add(this.LABEL_BROWSE)
.on('click', this.onAddAttachmentClicked)
.end()
.end()
.start().addClass(this.myClass('caption-container')).hide(this.files$.map((v) => { return v.length > 0; }))
.start()
.start('p').addClass(this.myClass('caption')).add(this.LABEL_SUPPORTED).end()
.start('p').addClass(self.myClass('caption')).add(this.getSupportedTypes(true)).end()
.end()
.start()
.start('p').addClass(this.myClass('caption')).add(this.LABEL_MAX_SIZE + ' ' + this.maxSize + 'MB').end()
.end()
.end()
.end()
.add(this.slot(function(files) {
var e = this.E();
for ( var i = 0; i < files.length; i++ ) {
e.tag({
class: 'foam.nanos.fs.fileDropZone.FileCard',
data: files[i],
selected: this.selected,
index: i
});
}
return e;
}, this.files$))
.on('drop', this.onDrop)
.on('dragover', (e) => { e.preventDefault() })
.on('dragenter', (e) => { e.preventDefault() })
.callIf(this.isMultipleFiles, function() {
this.start('input')
.addClass(this.myClass('input'))
.addClass(this.instanceClass(`input`))
.attrs({
type: 'file',
accept: this.getSupportedTypes(),
multiple: 'multiple'
})
.on('change', this.onChange)
.end();
})
.callIf(! this.isMultipleFiles, function() {
this.start('input')
.addClass(this.myClass('input'))
.addClass(this.instanceClass(`input`))
.attrs({
type: 'file',
accept: this.getSupportedTypes()
})
.on('change', this.onChange)
.end();
});
},
function getSupportedTypes(readable) {
var supportedTypes = Object.keys(this.supportedFormats);
var constructedString = '';
if ( readable ) {
supportedTypes.forEach((type, index) => {
constructedString += this.supportedFormats[type];
if ( index < supportedTypes.length - 1 ) {
constructedString += ', ';
}
});
} else {
supportedTypes.forEach((type, index) => {
constructedString += type;
if ( index < supportedTypes.length - 1 ) {
constructedString += ', ';
}
});
}
return constructedString;
},
function addFiles(files) {
var errors = false;
for ( var i = 0 ; i < files.length ; i++ ) {
// skip files that exceed limit
if ( files[i].size > ( this.maxSize * 1024 * 1024 ) ) {
if ( ! errors ) errors = true;
ctrl.notify(this.ERROR_FILE_SIZE, '', this.LogLevel.ERROR, true);
continue;
}
var isIncluded = false;
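        // skip files that were already added (matched by filename)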
for ( var j = 0; j < this.files.length; j++ ) {
if ( this.files[j].filename.localeCompare(files[i].name) === 0 ) {
isIncluded = true;
break;
}
}
if ( isIncluded ) continue;
if ( this.isMultipleFiles ) {
var f = this.File.create({
owner: this.user.id,
filename: files[i].name,
filesize: files[i].size,
mimeType: files[i].type,
data: this.BlobBlob.create({
blob: files[i]
})
});
this.files.push(f);
} else {
this.files[0] = this.File.create({
owner: this.user.id,
filename: files[i].name,
filesize: files[i].size,
mimeType: files[i].type,
data: this.BlobBlob.create({
blob: files[i]
})
});
}
}
this.selected = this.files.length - 1;
this.files = Array.from(this.files);
},
function isFileType(file) {
return ( file.type in this.supportedFormats );
},
function removeFile(atIndex) {
var files = Array.from(this.files);
files.splice(atIndex, 1);
if ( this.selected === files.length )
this.selected = files.length - 1;
this.files = files;
this.document.querySelector('.' + this.instanceClass(`input`)).value = null;
},
function highlight(atIndex) {
this.selected = atIndex;
this.files = this.files;
}
],
listeners: [
function onAddAttachmentClicked(e) {
if ( typeof e.target != 'undefined' ) {
if ( e.target.tagName == 'P' && e.target.tagName != 'A' ) {
this.document.querySelector('.' + this.instanceClass(`input`)).click();
}
} else {
// For IE browser
if ( e.srcElement.tagName == 'P' && e.srcElement.tagName != 'A' ) {
this.document.querySelector('.' + this.instanceClass(`input`)).click();
}
}
},
function onDrop(e) {
e.preventDefault();
var files = [];
var inputFile;
if ( e.dataTransfer.items ) {
inputFile = e.dataTransfer.items;
if ( inputFile ) {
for ( var i = 0 ; i < inputFile.length ; i++ ) {
// If dropped items aren't files, reject them
if ( inputFile[i].kind === 'file' ) {
var file = inputFile[i].getAsFile();
if ( this.isFileType(file) ) {
files.push(file);
} else {
ctrl.notify(this.ERROR_FILE_TYPE, '', this.LogLevel.ERROR, true);
}
}
}
}
} else if ( e.dataTransfer.files ) {
inputFile = e.dataTransfer.files;
for ( var i = 0 ; i < inputFile.length ; i++ ) {
var file = inputFile[i];
if ( this.isFileType(file) ) {
files.push(file);
} else {
ctrl.notify(this.ERROR_FILE_TYPE, '', this.LogLevel.ERROR, true);
}
}
}
this.addFiles(files);
},
function onChange(e) {
var files = e.target.files;
this.addFiles(files);
// Remove all temporary files in the element.target.files
this.document.querySelector('.' + this.instanceClass(`input`)).value = null;
this.onFilesChanged();
}
]
});
|
jacksonic/vjlofvhjfgm
|
src/foam/nanos/fs/fileDropZone/FileDropZone.js
|
JavaScript
|
apache-2.0
| 10,209 | 27.676966 | 139 | 0.512783 | false |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.stream.*;
import javax.annotation.Generated;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
/**
* Perform operations on Kubernetes Persistent Volumes Claims and get notified
* on Persistent Volumes Claim changes.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface KubernetesPersistentVolumesClaimsEndpointBuilderFactory {
/**
* Builder for endpoint for the Kubernetes Persistent Volume Claim
* component.
*/
public interface KubernetesPersistentVolumesClaimsEndpointBuilder
extends
EndpointProducerBuilder {
default AdvancedKubernetesPersistentVolumesClaimsEndpointBuilder advanced() {
return (AdvancedKubernetesPersistentVolumesClaimsEndpointBuilder) this;
}
/**
* The Kubernetes API Version to use.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param apiVersion the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder apiVersion(
String apiVersion) {
doSetProperty("apiVersion", apiVersion);
return this;
}
/**
* The dns domain, used for ServiceCall EIP.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param dnsDomain the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder dnsDomain(
String dnsDomain) {
doSetProperty("dnsDomain", dnsDomain);
return this;
}
/**
* Default KubernetesClient to use if provided.
*
* The option is a:
* <code>io.fabric8.kubernetes.client.KubernetesClient</code> type.
*
* Group: producer
*
* @param kubernetesClient the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder kubernetesClient(
io.fabric8.kubernetes.client.KubernetesClient kubernetesClient) {
doSetProperty("kubernetesClient", kubernetesClient);
return this;
}
/**
* Default KubernetesClient to use if provided.
*
* The option will be converted to a
* <code>io.fabric8.kubernetes.client.KubernetesClient</code> type.
*
* Group: producer
*
* @param kubernetesClient the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder kubernetesClient(
String kubernetesClient) {
doSetProperty("kubernetesClient", kubernetesClient);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder lazyStartProducer(
boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code>
* type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder lazyStartProducer(
String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Producer operation to do on Kubernetes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param operation the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder operation(
String operation) {
doSetProperty("operation", operation);
return this;
}
/**
* The port name, used for ServiceCall EIP.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param portName the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder portName(
String portName) {
doSetProperty("portName", portName);
return this;
}
/**
* The port protocol, used for ServiceCall EIP.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: tcp
* Group: producer
*
* @param portProtocol the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder portProtocol(
String portProtocol) {
doSetProperty("portProtocol", portProtocol);
return this;
}
/**
* The CA Cert Data.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param caCertData the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder caCertData(
String caCertData) {
doSetProperty("caCertData", caCertData);
return this;
}
/**
* The CA Cert File.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param caCertFile the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder caCertFile(
String caCertFile) {
doSetProperty("caCertFile", caCertFile);
return this;
}
/**
* The Client Cert Data.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientCertData the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder clientCertData(
String clientCertData) {
doSetProperty("clientCertData", clientCertData);
return this;
}
/**
* The Client Cert File.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientCertFile the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder clientCertFile(
String clientCertFile) {
doSetProperty("clientCertFile", clientCertFile);
return this;
}
/**
* The Key Algorithm used by the client.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyAlgo the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder clientKeyAlgo(
String clientKeyAlgo) {
doSetProperty("clientKeyAlgo", clientKeyAlgo);
return this;
}
/**
* The Client Key data.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyData the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder clientKeyData(
String clientKeyData) {
doSetProperty("clientKeyData", clientKeyData);
return this;
}
/**
* The Client Key file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyFile the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder clientKeyFile(
String clientKeyFile) {
doSetProperty("clientKeyFile", clientKeyFile);
return this;
}
/**
* The Client Key Passphrase.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyPassphrase the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder clientKeyPassphrase(
String clientKeyPassphrase) {
doSetProperty("clientKeyPassphrase", clientKeyPassphrase);
return this;
}
/**
* The Auth Token.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param oauthToken the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder oauthToken(
String oauthToken) {
doSetProperty("oauthToken", oauthToken);
return this;
}
/**
* Password to connect to Kubernetes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder password(
String password) {
doSetProperty("password", password);
return this;
}
/**
* Define if the certs we used are trusted anyway or not.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Group: security
*
* @param trustCerts the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder trustCerts(
Boolean trustCerts) {
doSetProperty("trustCerts", trustCerts);
return this;
}
/**
* Define if the certs we used are trusted anyway or not.
*
* The option will be converted to a
* <code>java.lang.Boolean</code> type.
*
* Group: security
*
* @param trustCerts the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder trustCerts(
String trustCerts) {
doSetProperty("trustCerts", trustCerts);
return this;
}
/**
* Username to connect to Kubernetes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param username the value to set
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder username(
String username) {
doSetProperty("username", username);
return this;
}
}
/**
* Advanced builder for endpoint for the Kubernetes Persistent Volume Claim
* component.
*/
public interface AdvancedKubernetesPersistentVolumesClaimsEndpointBuilder
extends
EndpointProducerBuilder {
default KubernetesPersistentVolumesClaimsEndpointBuilder basic() {
return (KubernetesPersistentVolumesClaimsEndpointBuilder) this;
}
/**
* Connection timeout in milliseconds to use when making requests to the
* Kubernetes API server.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: advanced
*
* @param connectionTimeout the value to set
* @return the dsl builder
*/
default AdvancedKubernetesPersistentVolumesClaimsEndpointBuilder connectionTimeout(
Integer connectionTimeout) {
doSetProperty("connectionTimeout", connectionTimeout);
return this;
}
/**
* Connection timeout in milliseconds to use when making requests to the
* Kubernetes API server.
*
* The option will be converted to a
* <code>java.lang.Integer</code> type.
*
* Group: advanced
*
* @param connectionTimeout the value to set
* @return the dsl builder
*/
default AdvancedKubernetesPersistentVolumesClaimsEndpointBuilder connectionTimeout(
String connectionTimeout) {
doSetProperty("connectionTimeout", connectionTimeout);
return this;
}
}
public interface KubernetesPersistentVolumesClaimsBuilders {
/**
* Kubernetes Persistent Volume Claim (camel-kubernetes)
* Perform operations on Kubernetes Persistent Volumes Claims and get
* notified on Persistent Volumes Claim changes.
*
* Category: container,cloud,paas
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* Syntax: <code>kubernetes-persistent-volumes-claims:masterUrl</code>
*
* Path parameter: masterUrl (required)
* Kubernetes Master url
*
* @param path masterUrl
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder kubernetesPersistentVolumesClaims(
String path) {
return KubernetesPersistentVolumesClaimsEndpointBuilderFactory.endpointBuilder("kubernetes-persistent-volumes-claims", path);
}
/**
* Kubernetes Persistent Volume Claim (camel-kubernetes)
* Perform operations on Kubernetes Persistent Volumes Claims and get
* notified on Persistent Volumes Claim changes.
*
* Category: container,cloud,paas
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* Syntax: <code>kubernetes-persistent-volumes-claims:masterUrl</code>
*
* Path parameter: masterUrl (required)
* Kubernetes Master url
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path masterUrl
* @return the dsl builder
*/
default KubernetesPersistentVolumesClaimsEndpointBuilder kubernetesPersistentVolumesClaims(
String componentName,
String path) {
return KubernetesPersistentVolumesClaimsEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
static KubernetesPersistentVolumesClaimsEndpointBuilder endpointBuilder(
String componentName,
String path) {
class KubernetesPersistentVolumesClaimsEndpointBuilderImpl extends AbstractEndpointBuilder implements KubernetesPersistentVolumesClaimsEndpointBuilder, AdvancedKubernetesPersistentVolumesClaimsEndpointBuilder {
public KubernetesPersistentVolumesClaimsEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new KubernetesPersistentVolumesClaimsEndpointBuilderImpl(path);
}
}
|
christophd/camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KubernetesPersistentVolumesClaimsEndpointBuilderFactory.java
|
Java
|
apache-2.0
| 18,716 | 36.063366 | 218 | 0.594465 | false |
/******************************************************************************
* Copyright 2019 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#pragma once
#include <vector>
#include "Eigen/Core"
#include "modules/localization/msf/local_pyramid_map/base_map/base_map_node.h"
#include "modules/localization/msf/local_pyramid_map/pyramid_map/pyramid_map_matrix.h"
namespace apollo {
namespace localization {
namespace msf {
namespace pyramid_map {
class PyramidMapNode : public BaseMapNode {
public:
PyramidMapNode();
~PyramidMapNode();
public:
virtual void Init(const BaseMapConfig* map_config);
virtual void Init(const BaseMapConfig* map_config, const MapNodeIndex& index,
bool create_map_cells = true);
/**@brief Propagate the data to the coarse resolution by check. */
void BottomUpSafe();
/**@brief Propagate the data to the coarse resolution.
* only update count, intensity, intensity var and altitude
*/
void BottomUpBase();
/**@brief Add the value of a pixel in the map node if the pixel in the node.
* @param <coordinate> The 3D global coordinate.
* @param <intensity> The reflectance intensity.
* @param <return> True, if pixel in the bound of the node, else False.
* */
bool AddValueIfInBound(const Eigen::Vector3d& coordinate,
unsigned char intensity, unsigned int level = 0);
/**@brief Add the value of a pixel in the map node if the pixel in the node.
* @param <coordinates> The 3D global coordinates.
* @param <intensities> The reflectance intensities.
* */
void AddValueIfInBound(const std::vector<Eigen::Vector3d>& coordinates,
const std::vector<unsigned char>& intensity,
unsigned int level = 0);
/**@brief Given the global coordinate, get the local 2D coordinate of the map
* cell matrix.
* <return> If global coordinate (x, y) belongs to this map node. */
bool GetCoordinate(const Eigen::Vector2d& coordinate, unsigned int level,
unsigned int* x, unsigned int* y) const;
/**@brief Given the local 2D coordinate, return the global coordinate. */
Eigen::Vector2d GetCoordinate(unsigned int level, unsigned int x,
unsigned int y) const;
virtual bool GetCoordinate(const Eigen::Vector2d& coordinate, unsigned int* x,
unsigned int* y) const;
virtual bool GetCoordinate(const Eigen::Vector3d& coordinate, unsigned int* x,
unsigned int* y) const;
virtual Eigen::Vector2d GetCoordinate(unsigned int x, unsigned int y) const;
/**@brief Given the 3D global coordinate,
* get the map cell intensity with check. */
float GetIntensitySafe(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell variance of the intensity with check. */
float GetIntensityVarSafe(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell's average altitude with check. */
float GetAltitudeSafe(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell's variance of the altitude with check. */
float GetAltitudeVarSafe(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell's average ground altitude with check. */
float GetGroundAltitudeSafe(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell's count of the samples with check. */
unsigned int GetCountSafe(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell's count of the ground samples with check. */
unsigned int GetGroundCountSafe(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell intensity without check. */
float GetIntensity(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell variance of the intensity without check. */
float GetIntensityVar(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell's average altitude without check. */
float GetAltitude(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell's variance of the altitude without check. */
float GetAltitudeVar(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell's average ground altitude without check. */
float GetGroundAltitude(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell's count of the samples without check. */
unsigned int GetCount(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Given the 3D global coordinate,
* get the map cell's count of the ground samples without check. */
unsigned int GetGroundCount(const Eigen::Vector3d& coordinate,
unsigned int level = 0) const;
/**@brief Compute mean intensity. */
double ComputeMeanIntensity(unsigned int level = 0);
private:
std::vector<float> resolutions_mr_;
};
} // namespace pyramid_map
} // namespace msf
} // namespace localization
} // namespace apollo
|
ApolloAuto/apollo
|
modules/localization/msf/local_pyramid_map/pyramid_map/pyramid_map_node.h
|
C
|
apache-2.0
| 6,634 | 44.438356 | 86 | 0.654809 | false |
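A rough call-order sketch for the node API declared above. It only illustrates the intended Init → AddValueIfInBound → BottomUpSafe → Get*Safe sequence; the map config, node index and coordinate values are assumed to come from the surrounding map framework and are placeholders here:

// Hypothetical usage sketch; config/index construction is outside this excerpt.
#include "modules/localization/msf/local_pyramid_map/pyramid_map/pyramid_map_node.h"

namespace apollo {
namespace localization {
namespace msf {
namespace pyramid_map {

void FillNodeSketch(const BaseMapConfig* map_config, const MapNodeIndex& index) {
  PyramidMapNode node;
  node.Init(map_config, index, true /* create_map_cells */);

  // Accumulate one LiDAR return into the finest level, then propagate upward.
  Eigen::Vector3d point(435000.0, 4400000.0, 30.0);  // illustrative global coordinate
  node.AddValueIfInBound(point, 120 /* intensity */, 0 /* level */);
  node.BottomUpSafe();

  float intensity = node.GetIntensitySafe(point, 1 /* coarser level */);
  (void)intensity;
}

}  // namespace pyramid_map
}  // namespace msf
}  // namespace localization
}  // namespace apollo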
<?php
if (PHP_SAPI !== "cli") {
header('HTTP/1.1 403 Forbidden');
exit('error: 403 Access Denied');
}
if (PHP_OS === "WINNT") {
exec('chcp 65001');
}
require(__DIR__ . '/library/URL.class.php');
// Install
$InstallRequestParameter = array(
"Language" => "zh-cn",
"DBHost" => "127.0.0.1",
"DBName" => "carbon",
"DBUser" => "root",
"DBPassword" => "",
"EnableMemcache" => "true",
"MemCachePrefix" => "carbon_",
"SearchServer" => "",
"SearchPort" => ""
);
$InstallResponse = URL::Post("http://localhost:8080/install/index.php", $InstallRequestParameter);
if (file_exists(__DIR__ . '/config.php')) {
$ConfigContent = file_get_contents(__DIR__ . '/config.php');
$ConfigContent = str_replace("define('DEBUG_MODE', false)", "define('DEBUG_MODE', true)", $ConfigContent);
file_put_contents(__DIR__ . '/config.php', $ConfigContent);
echo "\n\n\033[32m Installation success \033[0m\n\n";
exit(0);
} else {
echo "\n\n\033[31m Installation failed \033[0m\n\n";
exit(1);
}
|
hddata/Tiekaa
|
test-install.php
|
PHP
|
apache-2.0
| 1,018 | 26.166667 | 107 | 0.598619 | false |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.14.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.ResourceManager.Models
{
using System;
using System.Linq;
using System.Collections.Generic;
using Newtonsoft.Json;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Microsoft.Rest.Azure;
/// <summary>
/// Deployment operation parameters.
/// </summary>
public partial class Deployment
{
/// <summary>
/// Initializes a new instance of the Deployment class.
/// </summary>
public Deployment() { }
/// <summary>
/// Initializes a new instance of the Deployment class.
/// </summary>
public Deployment(DeploymentProperties properties = default(DeploymentProperties))
{
Properties = properties;
}
/// <summary>
/// The deployment properties.
/// </summary>
[JsonProperty(PropertyName = "properties")]
public DeploymentProperties Properties { get; set; }
/// <summary>
/// Validate the object. Throws ValidationException if validation fails.
/// </summary>
public virtual void Validate()
{
if (this.Properties != null)
{
this.Properties.Validate();
}
}
}
}
|
smithab/azure-sdk-for-net
|
src/ResourceManagement/Resource/Microsoft.Azure.Management.ResourceManager/Generated/Models/Deployment.cs
|
C#
|
apache-2.0
| 1,612 | 28.851852 | 90 | 0.612903 | false |
package com.puppetlabs.puppetserver;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.LinkedBlockingDeque;
/**
* A LinkedBlockingDeque that assumes the structure will be used as a Pool,
* where elements are recycled over time. Adds a <tt>registerLast</tt> method
* that can be used to register new elements when they are first introduced to
* the Pool, and a <tt>getRegisteredElements</tt> method that can be used to
* get a set of all of the known elements, regardless of whether they are
* currently available in the pool or not.
*
* @param <E> the type of element that can be added to the queue.
*/
public class RegisteredLinkedBlockingDeque<E> extends LinkedBlockingDeque<E> {
private final Set<E> registeredElements = new CopyOnWriteArraySet<>();
public RegisteredLinkedBlockingDeque(int capacity) {
super(capacity);
}
/**
     * This method is analogous to <tt>putLast</tt> in the parent class, but
* also causes the element to be added to the list of "registered" elements
* that will be returned by <tt>getRegisteredInstances</tt>.
*
* Note that this method is synchronized to try to ensure that the addition
* to the queue and the list of registered instances are visible roughly
* atomically to consumers, but because the underlying queue uses
* its own lock, it is possible for it to be modified on another thread while
* this method is being executed.
*
* @param e the element to register and put at the end of the queue.
*/
synchronized public void registerLast(E e) throws InterruptedException {
registeredElements.add(e);
putLast(e);
}
/**
* @return a set of all of the known elements that have been registered with
* this queue.
*/
public Set<E> getRegisteredElements() {
return registeredElements;
}
}
|
kylog/puppet-server
|
src/java/com/puppetlabs/puppetserver/RegisteredLinkedBlockingDeque.java
|
Java
|
apache-2.0
| 1,930 | 38.387755 | 81 | 0.70829 | false |
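A small self-contained sketch of the pool pattern described in the class comment above; the StringBuilder "workers" are stand-ins for whatever pooled resource the queue actually holds:

// Hypothetical usage sketch: a tiny pool of reusable objects.
import java.util.Set;
import com.puppetlabs.puppetserver.RegisteredLinkedBlockingDeque;

public class DequePoolSketch {
    public static void main(String[] args) throws InterruptedException {
        RegisteredLinkedBlockingDeque<StringBuilder> pool =
                new RegisteredLinkedBlockingDeque<>(2);

        // Introduce each element to the pool exactly once via registerLast().
        pool.registerLast(new StringBuilder("worker-1"));
        pool.registerLast(new StringBuilder("worker-2"));

        // Borrow and return elements with the ordinary deque operations.
        StringBuilder borrowed = pool.takeFirst();
        borrowed.append(" (used)");
        pool.putLast(borrowed);

        // Registered elements stay visible even while they are checked out.
        Set<StringBuilder> known = pool.getRegisteredElements();
        System.out.println(known.size() + " registered elements");
    }
}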
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.tools
import java.lang.{
Boolean => JBoolean,
Character => JChar,
Byte => JByte,
Short => JShort,
Integer => JInt,
Long => JLong,
Float => JFloat,
Double => JDouble
}
import org.objectweb.asm.Type
package object asm {
implicit class AugmentedType(val `type`: Type) extends AnyVal {
def boxed: Type = {
`type`.getSort() match {
case Type.BOOLEAN => classOf[JBoolean].asType
case Type.CHAR => classOf[JChar].asType
case Type.BYTE => classOf[JByte].asType
case Type.SHORT => classOf[JShort].asType
case Type.INT => classOf[JInt].asType
case Type.LONG => classOf[JLong].asType
case Type.FLOAT => classOf[JFloat].asType
case Type.DOUBLE => classOf[JDouble].asType
case _ => `type`
}
}
def isPrimitive: Boolean = {
isBoolean || isChar || isNumber
}
def isBoolean: Boolean = {
`type`.getSort() == Type.BOOLEAN
}
def isChar: Boolean = {
`type`.getSort() == Type.CHAR
}
def isInteger: Boolean = {
(`type`.getSort() == Type.BYTE
|| `type`.getSort() == Type.SHORT
|| `type`.getSort() == Type.INT)
}
def isLong: Boolean = {
`type`.getSort() == Type.LONG
}
def isFloat: Boolean = {
`type`.getSort() == Type.FLOAT
}
def isDouble: Boolean = {
`type`.getSort() == Type.DOUBLE
}
def isNumber: Boolean = {
isInteger || isLong || isFloat || isDouble
}
def isArray: Boolean = {
`type`.getSort() == Type.ARRAY
}
}
implicit class AsmClass[A](val cls: Class[A]) extends AnyVal {
def boxed: Class[_] = AsmClass.boxed.getOrElse(cls, cls)
def unboxed: Class[_] = AsmClass.unboxed.getOrElse(cls, cls)
def asType: Type = Type.getType(cls)
def asBoxedType: Type = boxed.asType
def asUnboxedType: Type = unboxed.asType
def getInternalName(): String = asType.getInternalName()
}
private object AsmClass {
val boxed = Map[Class[_], Class[_]](
classOf[Boolean] -> classOf[JBoolean],
classOf[Char] -> classOf[JChar],
classOf[Byte] -> classOf[JByte],
classOf[Short] -> classOf[JShort],
classOf[Int] -> classOf[JInt],
classOf[Long] -> classOf[JLong],
classOf[Float] -> classOf[JFloat],
classOf[Double] -> classOf[JDouble])
val unboxed = Map[Class[_], Class[_]](
classOf[JBoolean] -> classOf[Boolean],
classOf[JChar] -> classOf[Char],
classOf[JByte] -> classOf[Byte],
classOf[JShort] -> classOf[Short],
classOf[JInt] -> classOf[Int],
classOf[JLong] -> classOf[Long],
classOf[JFloat] -> classOf[Float],
classOf[JDouble] -> classOf[Double])
}
implicit val emptyAnnotationBuilderBlock =
ClassBuilder.emptyAnnotationBuilderBlock
implicit val emptyFieldBuilderBlock =
ClassBuilder.emptyFieldBuilderBlock
implicit val emptyTypeArgumentSignatureBuilderBlock =
TypeSignatureBuilder.emptyTypeArgumentSignatureBuilderBlock
}
|
ashigeru/asakusafw-spark
|
tools/asm/src/main/scala/com/asakusafw/spark/tools/asm/package.scala
|
Scala
|
apache-2.0
| 3,652 | 26.458647 | 75 | 0.639376 | false |
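A quick sketch exercising the implicit enrichments above (not taken from the Asakusa sources); it assumes the ASM library is on the classpath:

// Hypothetical sketch using the AugmentedType / AsmClass implicits defined above.
import com.asakusafw.spark.tools.asm._

object AsmEnrichmentSketch extends App {
  val intType = classOf[Int].asType                            // primitive int as an ASM Type
  println(intType.isPrimitive)                                 // true
  println(intType.boxed.getClassName)                          // java.lang.Integer
  println(classOf[java.lang.Long].asUnboxedType.getDescriptor) // "J"
  println(classOf[String].asType.isPrimitive)                  // false
}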
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* Hypertext Transfer Protocol (HTTP)
*
* Copyright 2012 Marc-Andre Moreau <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <winpr/crt.h>
#include <winpr/print.h>
#include <winpr/stream.h>
#include "http.h"
HttpContext* http_context_new()
{
HttpContext* http_context = (HttpContext*) malloc(sizeof(HttpContext));
if (http_context != NULL)
{
ZeroMemory(http_context, sizeof(HttpContext));
}
return http_context;
}
void http_context_set_method(HttpContext* http_context, char* method)
{
if (http_context->Method)
free(http_context->Method);
http_context->Method = _strdup(method);
}
void http_context_set_uri(HttpContext* http_context, char* uri)
{
if (http_context->URI)
free(http_context->URI);
http_context->URI = _strdup(uri);
}
void http_context_set_user_agent(HttpContext* http_context, char* user_agent)
{
if (http_context->UserAgent)
free(http_context->UserAgent);
http_context->UserAgent = _strdup(user_agent);
}
void http_context_set_host(HttpContext* http_context, char* host)
{
if (http_context->Host)
free(http_context->Host);
http_context->Host = _strdup(host);
}
void http_context_set_accept(HttpContext* http_context, char* accept)
{
if (http_context->Accept)
free(http_context->Accept);
http_context->Accept = _strdup(accept);
}
void http_context_set_cache_control(HttpContext* http_context, char* cache_control)
{
if (http_context->CacheControl)
free(http_context->CacheControl);
http_context->CacheControl = _strdup(cache_control);
}
void http_context_set_connection(HttpContext* http_context, char* connection)
{
if (http_context->Connection)
free(http_context->Connection);
http_context->Connection = _strdup(connection);
}
void http_context_set_pragma(HttpContext* http_context, char* pragma)
{
if (http_context->Pragma)
free(http_context->Pragma);
http_context->Pragma = _strdup(pragma);
}
void http_context_free(HttpContext* http_context)
{
if (http_context != NULL)
{
free(http_context->UserAgent);
free(http_context->Host);
free(http_context->URI);
free(http_context->Accept);
free(http_context->Method);
free(http_context->CacheControl);
free(http_context->Connection);
free(http_context->Pragma);
free(http_context);
}
}
void http_request_set_method(HttpRequest* http_request, char* method)
{
if (http_request->Method)
free(http_request->Method);
http_request->Method = _strdup(method);
}
void http_request_set_uri(HttpRequest* http_request, char* uri)
{
if (http_request->URI)
free(http_request->URI);
http_request->URI = _strdup(uri);
}
void http_request_set_auth_scheme(HttpRequest* http_request, char* auth_scheme)
{
if (http_request->AuthScheme)
free(http_request->AuthScheme);
http_request->AuthScheme = _strdup(auth_scheme);
}
void http_request_set_auth_param(HttpRequest* http_request, char* auth_param)
{
if (http_request->AuthParam)
free(http_request->AuthParam);
http_request->AuthParam = _strdup(auth_param);
}
char* http_encode_body_line(char* param, char* value)
{
char* line;
int length;
length = strlen(param) + strlen(value) + 2;
line = (char*) malloc(length + 1);
sprintf_s(line, length + 1, "%s: %s", param, value);
return line;
}
char* http_encode_content_length_line(int ContentLength)
{
char* line;
int length;
char str[32];
_itoa_s(ContentLength, str, sizeof(str), 10);
length = strlen("Content-Length") + strlen(str) + 2;
line = (char*) malloc(length + 1);
sprintf_s(line, length + 1, "Content-Length: %s", str);
return line;
}
char* http_encode_header_line(char* Method, char* URI)
{
char* line;
int length;
length = strlen("HTTP/1.1") + strlen(Method) + strlen(URI) + 2;
line = (char*) malloc(length + 1);
sprintf_s(line, length + 1, "%s %s HTTP/1.1", Method, URI);
return line;
}
char* http_encode_authorization_line(char* AuthScheme, char* AuthParam)
{
char* line;
int length;
length = strlen("Authorization") + strlen(AuthScheme) + strlen(AuthParam) + 3;
line = (char*) malloc(length + 1);
sprintf_s(line, length + 1, "Authorization: %s %s", AuthScheme, AuthParam);
return line;
}
wStream* http_request_write(HttpContext* http_context, HttpRequest* http_request)
{
int i;
wStream* s;
int length = 0;
http_request->count = 9;
http_request->lines = (char**) malloc(sizeof(char*) * http_request->count);
http_request->lines[0] = http_encode_header_line(http_request->Method, http_request->URI);
http_request->lines[1] = http_encode_body_line("Cache-Control", http_context->CacheControl);
http_request->lines[2] = http_encode_body_line("Connection", http_context->Connection);
http_request->lines[3] = http_encode_body_line("Pragma", http_context->Pragma);
http_request->lines[4] = http_encode_body_line("Accept", http_context->Accept);
http_request->lines[5] = http_encode_body_line("User-Agent", http_context->UserAgent);
http_request->lines[6] = http_encode_content_length_line(http_request->ContentLength);
http_request->lines[7] = http_encode_body_line("Host", http_context->Host);
if (http_request->Authorization != NULL)
{
http_request->lines[8] = http_encode_body_line("Authorization", http_request->Authorization);
}
else if ((http_request->AuthScheme != NULL) && (http_request->AuthParam != NULL))
{
http_request->lines[8] = http_encode_authorization_line(http_request->AuthScheme, http_request->AuthParam);
}
for (i = 0; i < http_request->count; i++)
{
length += (strlen(http_request->lines[i]) + 2); /* add +2 for each '\r\n' character */
}
length += 2; /* empty line "\r\n" at end of header */
length += 1; /* null terminator */
s = Stream_New(NULL, length);
for (i = 0; i < http_request->count; i++)
{
Stream_Write(s, http_request->lines[i], strlen(http_request->lines[i]));
Stream_Write(s, "\r\n", 2);
free(http_request->lines[i]);
}
Stream_Write(s, "\r\n", 2);
free(http_request->lines);
Stream_Write(s, "\0", 1); /* append null terminator */
Stream_Rewind(s, 1); /* don't include null terminator in length */
Stream_Length(s) = Stream_Position(s);
return s;
}
HttpRequest* http_request_new()
{
HttpRequest* http_request = (HttpRequest*) malloc(sizeof(HttpRequest));
if (http_request != NULL)
{
ZeroMemory(http_request, sizeof(HttpRequest));
}
return http_request;
}
void http_request_free(HttpRequest* http_request)
{
if (http_request != NULL)
{
free(http_request->AuthParam);
free(http_request->AuthScheme);
free(http_request->Authorization);
free(http_request->Content);
free(http_request->Method);
free(http_request->URI);
free(http_request);
}
}
void http_response_parse_header_status_line(HttpResponse* http_response, char* status_line)
{
char* separator;
char* status_code;
char* reason_phrase;
separator = strchr(status_line, ' ');
status_code = separator + 1;
separator = strchr(status_code, ' ');
reason_phrase = separator + 1;
*separator = '\0';
http_response->StatusCode = atoi(status_code);
http_response->ReasonPhrase = _strdup(reason_phrase);
*separator = ' ';
}
void http_response_parse_header_field(HttpResponse* http_response, char* name, char* value)
{
if (strcmp(name, "Content-Length") == 0)
{
http_response->ContentLength = atoi(value);
}
else if (strcmp(name, "Authorization") == 0)
{
char* separator;
http_response->Authorization = _strdup(value);
separator = strchr(value, ' ');
if (separator != NULL)
{
*separator = '\0';
http_response->AuthScheme = _strdup(value);
http_response->AuthParam = _strdup(separator + 1);
*separator = ' ';
}
}
else if (strcmp(name, "WWW-Authenticate") == 0)
{
char* separator;
separator = strstr(value, "=\"");
if (separator != NULL)
{
/* WWW-Authenticate: parameter with spaces="value" */
return;
}
separator = strchr(value, ' ');
if (separator != NULL)
{
/* WWW-Authenticate: NTLM base64token */
*separator = '\0';
http_response->AuthScheme = _strdup(value);
http_response->AuthParam = _strdup(separator + 1);
*separator = ' ';
return;
}
}
}
void http_response_parse_header(HttpResponse* http_response)
{
int count;
char* line;
char* name;
char* value;
char* separator;
http_response_parse_header_status_line(http_response, http_response->lines[0]);
for (count = 1; count < http_response->count; count++)
{
line = http_response->lines[count];
separator = strstr(line, ": ");
if (separator == NULL)
continue;
separator[0] = '\0';
separator[1] = '\0';
name = line;
value = separator + 2;
http_response_parse_header_field(http_response, name, value);
separator[0] = ':';
separator[1] = ' ';
}
}
void http_response_print(HttpResponse* http_response)
{
int i;
for (i = 0; i < http_response->count; i++)
{
printf("%s\n", http_response->lines[i]);
}
printf("\n");
}
HttpResponse* http_response_recv(rdpTls* tls)
{
BYTE* p;
int nbytes;
int length;
int status;
BYTE* buffer;
char* content;
char* header_end;
HttpResponse* http_response;
nbytes = 0;
length = 10000;
content = NULL;
buffer = malloc(length);
http_response = http_response_new();
p = buffer;
http_response->ContentLength = 0;
while (TRUE)
{
while (nbytes < 5)
{
status = tls_read(tls, p, length - nbytes);
if (status > 0)
{
nbytes += status;
p = (BYTE*) &buffer[nbytes];
}
else if (status == 0)
{
continue;
}
else
{
http_response_free(http_response);
return NULL;
}
}
header_end = strstr((char*) buffer, "\r\n\r\n");
if (header_end)
{
header_end += 2;
}
else
{
printf("http_response_recv: invalid response:\n");
winpr_HexDump(buffer, status);
http_response_free(http_response);
return NULL;
}
if (header_end != NULL)
{
int count;
char* line;
header_end[0] = '\0';
header_end[1] = '\0';
content = &header_end[2];
count = 0;
line = (char*) buffer;
while ((line = strstr(line, "\r\n")) != NULL)
{
line++;
count++;
}
http_response->count = count;
http_response->lines = (char**) malloc(sizeof(char*) * http_response->count);
count = 0;
line = strtok((char*) buffer, "\r\n");
while (line != NULL)
{
http_response->lines[count] = _strdup(line);
line = strtok(NULL, "\r\n");
count++;
}
http_response_parse_header(http_response);
if (http_response->ContentLength > 0)
{
http_response->Content = _strdup(content);
}
break;
}
if ((length - nbytes) <= 0)
{
length *= 2;
buffer = realloc(buffer, length);
p = (BYTE*) &buffer[nbytes];
}
}
free(buffer);
return http_response;
}
HttpResponse* http_response_new()
{
HttpResponse* http_response;
http_response = (HttpResponse*) malloc(sizeof(HttpResponse));
if (http_response != NULL)
{
ZeroMemory(http_response, sizeof(HttpResponse));
}
return http_response;
}
void http_response_free(HttpResponse* http_response)
{
int i;
if (http_response != NULL)
{
for (i = 0; i < http_response->count; i++)
free(http_response->lines[i]);
free(http_response->lines);
free(http_response->ReasonPhrase);
free(http_response->AuthParam);
free(http_response->AuthScheme);
free(http_response->Authorization);
if (http_response->ContentLength > 0)
free(http_response->Content);
free(http_response);
}
}
|
woshipike00/FreeRDP
|
libfreerdp/core/gateway/http.c
|
C
|
apache-2.0
| 11,935 | 21.101852 | 109 | 0.665941 | false |
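A minimal sketch (not part of FreeRDP) showing how the request-side helpers above fit together. The URI, host and NTLM token are placeholders, and an auth scheme is set so that all nine header lines written by http_request_write() are populated:

/* Hypothetical usage sketch for the helpers above; all field values are illustrative. */
#include "http.h"

wStream* build_rpc_in_data_request(void)
{
	HttpContext* context;
	HttpRequest* request;
	wStream* s;

	context = http_context_new();
	http_context_set_method(context, "RPC_IN_DATA");
	http_context_set_uri(context, "/rpc/rpcproxy.dll");
	http_context_set_user_agent(context, "MSRPC");
	http_context_set_host(context, "gateway.example.com");
	http_context_set_accept(context, "application/rpc");
	http_context_set_cache_control(context, "no-cache");
	http_context_set_connection(context, "Keep-Alive");
	http_context_set_pragma(context, "no-cache");

	request = http_request_new();
	http_request_set_method(request, context->Method);
	http_request_set_uri(request, context->URI);
	request->ContentLength = 0;
	http_request_set_auth_scheme(request, "NTLM");
	http_request_set_auth_param(request, "TlRMTVNTUAABAAAA...");	/* illustrative token */

	s = http_request_write(context, request);

	http_request_free(request);
	http_context_free(context);
	return s;
}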
/**
* Created with IntelliJ IDEA.
* User: Mateusz
* Date: 14.11.12
* Time: 18:58
*/
'use strict';
require.config({
baseUrl:'js',
paths:{
text:'../lib/require/text',
jquery:'../lib/jquery/jquery',
angular:'../lib/angular/angular'
},
shim:{
'angular':{
exports:'angular'
}
},
priority:[
'angular'
],
urlArgs:'v=1.1'
});
require([
'angular',
'text',
'jquery',
'app',
'routes'
], function (angular) {
//This function will be called when all the dependencies
//listed above are loaded. Note that this function could
//be called before the page is loaded.
//This callback is optional.
$(document).ready(function () {
angular.bootstrap(document, ['myApp']);
});
});
|
lizouzt/lizouzt.github.com
|
gitbook/donut/angular/blog/app/js/main.js
|
JavaScript
|
apache-2.0
| 811 | 17.883721 | 60 | 0.543773 | false |
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef GEO_EARTH_ENTERPRISE_SRC_FUSION_AUTOINGEST_ASSETHANDLE_H_
#define GEO_EARTH_ENTERPRISE_SRC_FUSION_AUTOINGEST_ASSETHANDLE_H_
#include <string>
#include "common/khRefCounter.h"
#include "common/khCache.h"
#include "common/khTypes.h"
class AssetVersionRef;
/******************************************************************************
*** AssetHandle templates (Private Implementation)
***
*** These templates implement the functionality for the handles to
*** AssetImpl's and AssetVersionImpl's.
***
*** The code should never use the various Impl classes directly, but should
*** always use these handle classes instead. As the trailing '_' in the
*** name suggests, these classes are not intended to be used
*** directly. Asset.h and AssetVersion.h declare typedefs to be used.
***
*** typedef AssetHandle_<AssetImpl> Asset;
*** typedef AssetHandle_<AssetVersionImpl> AssetVersion;
******************************************************************************/
template <class Impl_>
class AssetHandle_ {
friend class Impl;
public:
typedef Impl_ Impl;
typedef khRefGuard<Impl> HandleType;
protected:
static inline khCache<std::string, HandleType>& cache(void);
public:
static uint32 CacheSize(void) { return cache().size(); }
// Adds handle-object to cache.
void CacheAdd() {
assert(handle);
cache().Add(Ref(), handle);
}
// Removes handle-object from cache.
void CacheRemove() {
cache().Remove(Ref());
}
// Only implemented/used by Version variant.
// Loads asset version from file without caching.
void BindNoCache() const;
protected:
static const bool check_timestamps;
std::string ref;
mutable HandleType handle;
// Only implemented/used by Asset variant.
void DoBind(const std::string &ref,
bool checkFileExistenceFirst) const;
// Only implemented/used by Version variant.
template <int do_cache>
void DoBind(const std::string &boundRef,
const AssetVersionRef &boundVerRef,
bool checkFileExistenceFirst,
Int2Type<do_cache> do_cache_val) const;
// Only implemented/used by Version variant.
void DoBind(const std::string &boundRef,
const AssetVersionRef &boundVerRef,
bool checkFileExistenceFirst) const;
// Implemented by both Asset & Version variants.
void Bind(void) const;
// Allows subclasses to do extra work.
virtual void OnBind(const std::string &boundref) const { }
virtual HandleType CacheFind(const std::string &boundref) const {
HandleType entry;
(void)cache().Find(boundref, entry);
return entry;
}
virtual HandleType Load(const std::string &boundref) const {
return HandleType(Impl::Load(boundref));
}
public:
AssetHandle_(void) : ref(), handle() { }
AssetHandle_(const std::string &ref_) : ref(ref_), handle() { }
// the compiler generated assignment and copy constructor are fine for us
// ref & handle have stable copy semantics and we don't have to worry about
// adding to the cache because the src object will already have done that
virtual ~AssetHandle_(void) { }
std::string Ref(void) const { return ref; }
bool Valid(void) const;
operator bool(void) const { return Valid(); }
const Impl* operator->(void) const {
Bind();
return handle.operator->();
}
// needed so AssetHandle_ can be put in a set
bool operator<(const AssetHandle_ &o) const {
return ref < o.ref;
}
// if the ref's are equal then the cache semantics of this handle
// guarantees that the two handles point to the same memory.
bool operator==(const AssetHandle_ &o) const {
return ref == o.ref;
}
};
/******************************************************************************
*** Derived ReadOnly Handles
******************************************************************************/
template <class Base_, class Impl_>
class DerivedAssetHandle_ : public virtual Base_ {
public:
typedef Impl_ Impl;
typedef Base_ Base;
typedef typename Base::HandleType HandleType;
protected:
virtual HandleType CacheFind(const std::string &boundref) const {
HandleType entry;
if (this->cache().Find(boundref, entry)) {
// we have to check if it maps to Impl* since somebody
// else may have put it in the cache
if (!dynamic_cast<Impl*>(&*entry)) {
entry = HandleType();
}
}
return entry;
}
virtual HandleType Load(const std::string &boundref) const {
// Impl::Load will succeed or throw.
// The derived khRefGuard will be automatically converted
    // to the base khRefGuard
return HandleType(Impl::Load(boundref));
}
public:
DerivedAssetHandle_(void) : Base() { }
DerivedAssetHandle_(const std::string &ref_) : Base(ref_) { }
// it's OK to construct a derived from a base, we just check first
// and clear the handle if the types don't match
DerivedAssetHandle_(const Base &o) : Base(o) {
if (this->handle &&
!dynamic_cast<const Impl*>(this->handle.operator->())) {
this->handle = HandleType();
}
}
// the compiler generated assignment and copy constructor are fine for us
// we have no addition members or semantics to maintain
const Impl* operator->(void) const {
this->Bind();
// we ensure that the base class handle always points to our derived
// type so this dynamic cast should never fail. but it needs to be
// dynamic instead of static since we're casting from a virtual base
return dynamic_cast<const Impl*>(this->handle.operator->());
}
};
#endif // GEO_EARTH_ENTERPRISE_SRC_FUSION_AUTOINGEST_ASSETHANDLE_H_
|
tst-ahernandez/earthenterprise
|
earth_enterprise/src/fusion/autoingest/AssetHandle.h
|
C
|
apache-2.0
| 6,237 | 32 | 80 | 0.656245 | false |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.testFramework;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.ProhibitAWTEvents;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.impl.ProjectImpl;
import com.intellij.openapi.util.Disposer;
import com.intellij.project.TestProjectManager;
import com.intellij.util.PairProcessor;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.io.PersistentEnumeratorBase;
import com.intellij.util.io.PersistentEnumeratorCache;
import com.intellij.util.ref.DebugReflectionUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Vector;
import java.util.function.Predicate;
import java.util.function.Supplier;
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public final class LeakHunter {
@TestOnly
public static void checkProjectLeak() throws AssertionError {
checkLeak(allRoots(), ProjectImpl.class, project -> !project.isDefault() && !project.isLight());
}
@TestOnly
public static void checkNonDefaultProjectLeak() {
checkLeak(allRoots(), ProjectImpl.class, project -> !project.isDefault());
}
@TestOnly
public static void checkLeak(@NotNull Object root, @NotNull Class<?> suspectClass) throws AssertionError {
checkLeak(root, suspectClass, null);
}
/**
* Checks if there is a memory leak if an object of type {@code suspectClass} is strongly accessible via references from the {@code root} object.
*/
@TestOnly
public static <T> void checkLeak(@NotNull Supplier<? extends Map<Object, String>> rootsSupplier,
@NotNull Class<T> suspectClass,
@Nullable Predicate<? super T> isReallyLeak) throws AssertionError {
processLeaks(rootsSupplier, suspectClass, isReallyLeak, (leaked, backLink)->{
String place = leaked instanceof Project ? TestProjectManager.getCreationPlace((Project)leaked) : "";
String message ="Found leaked "+leaked.getClass() + ": "+leaked +
"; hash: " + System.identityHashCode(leaked) + "; place: " + place + "\n" +
backLink;
System.out.println(message);
System.out.println(";-----");
UsefulTestCase.printThreadDump();
throw new AssertionError(message);
});
}
/**
* Checks if there is a memory leak if an object of type {@code suspectClass} is strongly accessible via references from the {@code root} object.
*/
@TestOnly
public static <T> void processLeaks(@NotNull Supplier<? extends Map<Object, String>> rootsSupplier,
@NotNull Class<T> suspectClass,
@Nullable Predicate<? super T> isReallyLeak,
@NotNull PairProcessor<? super T, Object> processor) throws AssertionError {
if (SwingUtilities.isEventDispatchThread()) {
UIUtil.dispatchAllInvocationEvents();
}
else {
UIUtil.pump();
}
PersistentEnumeratorCache.clearCacheForTests();
Runnable runnable = () -> {
try (AccessToken ignored = ProhibitAWTEvents.start("checking for leaks")) {
DebugReflectionUtil.walkObjects(10000, rootsSupplier.get(), suspectClass, __ -> true, (leaked, backLink) -> {
if (isReallyLeak == null || isReallyLeak.test(leaked)) {
return processor.process(leaked, backLink);
}
return true;
});
}
};
Application application = ApplicationManager.getApplication();
if (application == null) {
runnable.run();
}
else {
application.runReadAction(runnable);
}
}
/**
* Checks if there is a memory leak if an object of type {@code suspectClass} is strongly accessible via references from the {@code root} object.
*/
@TestOnly
public static <T> void checkLeak(@NotNull Object root, @NotNull Class<T> suspectClass, @Nullable Predicate<? super T> isReallyLeak) throws AssertionError {
checkLeak(() -> Collections.singletonMap(root, "Root object"), suspectClass, isReallyLeak);
}
public static @NotNull Supplier<Map<Object, String>> allRoots() {
return () -> {
ClassLoader classLoader = LeakHunter.class.getClassLoader();
// inspect static fields of all loaded classes
@SuppressWarnings("UseOfObsoleteCollectionType")
Vector<?> allLoadedClasses = ReflectionUtil.getField(classLoader.getClass(), classLoader, Vector.class, "classes");
// Remove expired invocations, so they are not used as object roots.
LaterInvocator.purgeExpiredItems();
Map<Object, String> result = new IdentityHashMap<>();
Application application = ApplicationManager.getApplication();
if (application != null) {
result.put(application, "ApplicationManager.getApplication()");
}
result.put(Disposer.getTree(), "Disposer.getTree()");
result.put(IdeEventQueue.getInstance(), "IdeEventQueue.getInstance()");
result.put(LaterInvocator.getLaterInvocatorEdtQueue(), "LaterInvocator.getLaterInvocatorEdtQueue()");
result.put(LaterInvocator.getLaterInvocatorWtQueue(), "LaterInvocator.getLaterInvocatorWtQueue()");
result.put(ThreadTracker.getThreads().values(), "all live threads");
if (allLoadedClasses != null) {
result.put(allLoadedClasses, "all loaded classes statics");
}
return result;
};
}
}
|
siosio/intellij-community
|
platform/testFramework/src/com/intellij/testFramework/LeakHunter.java
|
Java
|
apache-2.0
| 5,956 | 42.474453 | 157 | 0.699799 | false |
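A sketch of how a test might call the hunter above during teardown; the JUnit wiring is assumed and is not part of this excerpt:

// Hypothetical teardown sketch -- the surrounding JUnit test class is assumed.
import com.intellij.openapi.project.impl.ProjectImpl;
import com.intellij.testFramework.LeakHunter;
import org.junit.After;

public class HeavyProjectTest {
    @After
    public void assertNoLeakedProjects() throws Exception {
        // Walk static fields, the Disposer tree, event queues and live threads,
        // failing the test if a non-default, non-light project is still reachable.
        LeakHunter.checkLeak(LeakHunter.allRoots(), ProjectImpl.class,
                             project -> !project.isDefault() && !project.isLight());
    }
}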
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.replication;
/**
 * The peer was not found
*/
public class PeerNotFoundException extends Exception {
private static final long serialVersionUID = 1L;
public PeerNotFoundException(String peer) {
this(peer, (String) null);
}
public PeerNotFoundException(String peer, String message) {
super("Peer '" + peer + "' not found " + (message == null || message.isEmpty() ? "" : message));
}
public PeerNotFoundException(String message, Throwable cause) {
super(message, cause);
}
public PeerNotFoundException(String peer, String message, Throwable cause) {
super("Peer '" + peer + "' not found " + message, cause);
}
}
|
lstav/accumulo
|
core/src/main/java/org/apache/accumulo/core/client/replication/PeerNotFoundException.java
|
Java
|
apache-2.0
| 1,499 | 34.690476 | 100 | 0.724483 | false |
package org.jboss.resteasy.cdi.events;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.logging.Logger;
import javax.enterprise.event.Event;
import javax.inject.Inject;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import org.jboss.resteasy.cdi.util.Constants;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
/**
* @author <a href="[email protected]">Ron Sigal</a>
* @version $Revision: 1.1 $
*
* Copyright May 8, 2012
*/
@Provider
@Produces(Constants.MEDIA_TYPE_TEST_XML)
public class BookWriter implements MessageBodyWriter<Book>
{
static private MessageBodyWriter<Book> delegate;
@Inject @Write(context="writer") Event<String> writeEvent;
@Inject private Logger log;
static
{
System.out.println("In BookWriter static {}");
ResteasyProviderFactory factory = ResteasyProviderFactory.getInstance();
delegate = factory.getMessageBodyWriter(Book.class, null, null, Constants.MEDIA_TYPE_TEST_XML_TYPE);
}
public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType)
{
log.info("entering BookWriter.isWriteable()");
boolean b = Book.class.equals(type);
log.info("leaving BookWriter.isWriteable()");
return b;
}
public long getSize(Book t, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType)
{
log.info("entering BookWriter.getSize()");
log.info("leaving BookWriter.getSize()");
return -1;
}
@Override
public void writeTo(Book t, Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream)
throws IOException, WebApplicationException
{
log.info("entering BookWriter.writeTo()");
log.info("BookWriter.writeTo() writing " + t);
delegate.writeTo(t, type, genericType, annotations, mediaType, httpHeaders, entityStream);
log.info("BookWriter firing writeEvent");
writeEvent.fire("writeEvent");
log.info("leaving BookWriter.writeTo()");
}
}
|
raphaelning/resteasy-client-android
|
jaxrs/arquillian/resteasy-cdi-ejb-test/src/main/java/org/jboss/resteasy/cdi/events/BookWriter.java
|
Java
|
apache-2.0
| 2,373 | 31.958333 | 110 | 0.72145 | false |
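A hypothetical resource sketch showing the kind of endpoint that would cause the writer above to be selected; the Book constructor used here is an assumption, not something visible in this excerpt:

// Hypothetical JAX-RS resource sketch; Book's constructor is assumed for illustration.
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import org.jboss.resteasy.cdi.events.Book;
import org.jboss.resteasy.cdi.util.Constants;

@Path("/books")
public class BookResource {
   @GET
   @Produces(Constants.MEDIA_TYPE_TEST_XML)
   public Book get() {
      // Serializing the response routes through BookWriter, which also fires
      // the CDI "writeEvent" observed elsewhere in the test application.
      return new Book("RESTEasy and CDI");  // assumed constructor
   }
}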
/**
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.admin.client.mvp.view.dialog;
import org.kaaproject.avro.ui.gwt.client.widget.AlertPanel;
import org.kaaproject.avro.ui.gwt.client.widget.dialog.AvroUiDialog;
import org.kaaproject.kaa.server.admin.client.util.Utils;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.VerticalPanel;
public class UnauthorizedSessionDialog extends AvroUiDialog {
public UnauthorizedSessionDialog(final Listener listener) {
super(false, true);
setWidth("500px");
setTitle(Utils.constants.sessionExpired());
VerticalPanel dialogContents = new VerticalPanel();
dialogContents.setSpacing(4);
add(dialogContents);
AlertPanel warningPanel = new AlertPanel(AlertPanel.Type.WARNING);
warningPanel.setMessage(Utils.messages.sessionExpiredMessage());
dialogContents.add(warningPanel);
Button loginButton = new Button(Utils.constants.logInAgain(), new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
hide();
listener.onLogin();
}
});
Button ignoreButton = new Button(Utils.constants.ignore(), new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
hide();
listener.onIgnore();
}
});
addButton(loginButton);
addButton(ignoreButton);
}
public interface Listener {
public void onLogin();
public void onIgnore();
}
}
|
Deepnekroz/kaa
|
server/node/src/main/java/org/kaaproject/kaa/server/admin/client/mvp/view/dialog/UnauthorizedSessionDialog.java
|
Java
|
apache-2.0
| 2,399 | 31.418919 | 90 | 0.651105 | false |
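A usage sketch written under the assumption that AvroUiDialog exposes the usual GWT PopupPanel show/center API; that assumption is not verifiable from this excerpt:

// Hypothetical sketch; center() is assumed to be inherited from the GWT dialog hierarchy.
import com.google.gwt.user.client.Window;

public final class SessionExpiryHandler {
    public static void showExpiredDialog() {
        UnauthorizedSessionDialog dialog =
                new UnauthorizedSessionDialog(new UnauthorizedSessionDialog.Listener() {
                    @Override
                    public void onLogin() {
                        Window.Location.reload();  // send the user back through login
                    }

                    @Override
                    public void onIgnore() {
                        // keep the stale session; later requests may fail
                    }
                });
        dialog.center();
    }
}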
//
// Copyright (C) DataStax Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
namespace Cassandra
{
/// <summary>
/// A retry policy that wraps another policy, logging the decision made by its sub-policy.
/// <para>
/// Note that this policy only log the <c>Ignore</c> and <c>Retry</c> decisions (since <c>Rethrow</c>
/// decisions just amount to propagate the cassandra exception). The logging is done at the <c>Info</c> level.
/// </para>
/// </summary>
public class LoggingRetryPolicy : IExtendedRetryPolicy
{
private readonly Logger _logger = new Logger(typeof (LoggingRetryPolicy));
private readonly IExtendedRetryPolicy _extendedPolicy;
/// <summary>
/// Creates a new <see cref="IExtendedRetryPolicy"/> that logs the decision of the provided <c>policy</c>.
/// </summary>
/// <param name="policy"> the policy to wrap. The policy created by this
        /// constructor will return the same decisions as <c>policy</c>, but will log them.</param>
public LoggingRetryPolicy(IRetryPolicy policy)
{
ChildPolicy = policy;
// Use the provided policy for extended policy methods.
// If the provided policy is not IExtendedRetryPolicy, use the default.
_extendedPolicy = (policy as IExtendedRetryPolicy) ?? new DefaultRetryPolicy();
}
public IRetryPolicy ChildPolicy { get; }
public RetryDecision OnReadTimeout(IStatement query, ConsistencyLevel cl, int requiredResponses, int receivedResponses, bool dataRetrieved,
int nbRetry)
{
RetryDecision decision = ChildPolicy.OnReadTimeout(query, cl, requiredResponses, receivedResponses, dataRetrieved, nbRetry);
switch (decision.DecisionType)
{
case RetryDecision.RetryDecisionType.Ignore:
_logger.Info(
string.Format(
"Ignoring read timeout (initial consistency: {0}, required responses: {1}, received responses: {2}, data retrieved: {3}, retries: {4})",
cl, requiredResponses, receivedResponses, dataRetrieved, nbRetry));
break;
case RetryDecision.RetryDecisionType.Retry:
_logger.Info(
string.Format(
"Retrying on read timeout at consistency {0} (initial consistency: {1}, required responses: {2}, received responses: {3}, data retrieved: {4}, retries: {5})",
CL(cl, decision), cl, requiredResponses, receivedResponses, dataRetrieved, nbRetry));
break;
}
return decision;
}
public RetryDecision OnWriteTimeout(IStatement query, ConsistencyLevel cl, string writeType, int requiredAcks, int receivedAcks, int nbRetry)
{
RetryDecision decision = ChildPolicy.OnWriteTimeout(query, cl, writeType, requiredAcks, receivedAcks, nbRetry);
switch (decision.DecisionType)
{
case RetryDecision.RetryDecisionType.Ignore:
_logger.Info(
                        string.Format(
                            "Ignoring write timeout (initial consistency: {0}, write type: {1}, required acknowledgments: {2}, received acknowledgments: {3}, retries: {4})",
cl, writeType, requiredAcks, receivedAcks, nbRetry));
break;
case RetryDecision.RetryDecisionType.Retry:
_logger.Info(
                        string.Format(
                            "Retrying on write timeout at consistency {0} (initial consistency: {1}, write type: {2}, required acknowledgments: {3}, received acknowledgments: {4}, retries: {5})",
CL(cl, decision), cl, writeType, requiredAcks, receivedAcks, nbRetry));
break;
}
return decision;
}
public RetryDecision OnUnavailable(IStatement query, ConsistencyLevel cl, int requiredReplica, int aliveReplica, int nbRetry)
{
RetryDecision decision = ChildPolicy.OnUnavailable(query, cl, requiredReplica, aliveReplica, nbRetry);
switch (decision.DecisionType)
{
case RetryDecision.RetryDecisionType.Ignore:
_logger.Info(
string.Format(
"Ignoring unavailable exception (initial consistency: {0}, required replica: {1}, alive replica: {2}, retries: {3})", cl,
requiredReplica, aliveReplica, nbRetry));
break;
case RetryDecision.RetryDecisionType.Retry:
_logger.Info(
string.Format(
"Retrying on unavailable exception at consistency {0} (initial consistency: {1}, required replica: {2}, alive replica: {3}, retries: {4})",
CL(cl, decision), cl, requiredReplica, aliveReplica, nbRetry));
break;
}
return decision;
}
private static ConsistencyLevel CL(ConsistencyLevel cl, RetryDecision decision)
{
return decision.RetryConsistencyLevel ?? cl;
}
/// <inheritdoc />
public RetryDecision OnRequestError(IStatement statement, Configuration config, Exception ex, int nbRetry)
{
var decision = _extendedPolicy.OnRequestError(statement, config, ex, nbRetry);
switch (decision.DecisionType)
{
case RetryDecision.RetryDecisionType.Ignore:
_logger.Info("Ignoring on request error(retries: {0}, exception: {1})", nbRetry, ex);
break;
case RetryDecision.RetryDecisionType.Retry:
_logger.Info("Retrying on request error (retries: {0}, exception: {1})", nbRetry, ex);
break;
}
return decision;
}
}
}
|
mintsoft/csharp-driver
|
src/Cassandra/Policies/LoggingRetryPolicy.cs
|
C#
|
apache-2.0
| 6,697 | 48.977612 | 194 | 0.588323 | false |
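A short cluster-setup sketch wrapping the driver's default policy with the logging decorator above; the contact point and statement are placeholders:

// Hypothetical setup sketch; contact point and statement are illustrative.
using Cassandra;

class RetryPolicySketch
{
    static void Main()
    {
        var cluster = Cluster.Builder()
            .AddContactPoint("127.0.0.1")
            // Log every Ignore/Retry decision made by the default policy at Info level.
            .WithRetryPolicy(new LoggingRetryPolicy(new DefaultRetryPolicy()))
            .Build();

        using (var session = cluster.Connect())
        {
            session.Execute("SELECT release_version FROM system.local");
        }
    }
}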
//
// VideoPlayerViewController.h
// PLPlayerKitDemo
//
// Created by 0day on 15/5/7.
//  Copyright (c) 2015 Pili Engineering, Qiniu Inc. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface VideoPlayerViewController : UIViewController
@property (nonatomic, copy) NSURL *url;
@property (nonatomic, copy) NSDictionary *parameters;
@property (weak, nonatomic) IBOutlet UIButton *actionButton;
- (instancetype)initWithURL:(NSURL *)url parameters:(NSDictionary *)parameters;
@end
|
raykle/PLPlayerKit
|
Example/PLPlayerKit/VideoPlayerViewController.h
|
C
|
apache-2.0
| 498 | 25.105263 | 79 | 0.743952 | false |
#!/usr/bin/env python3
###############################################################################
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import math
import numpy as np
class LatAcceleration:
def __init__(self):
self.centripetal_accel_list = []
self.centripetal_jerk_list = []
def put(self, adc_trajectory):
init_point = adc_trajectory.debug.planning_data.init_point
# centripetal_jerk
centripetal_jerk = 2 * init_point.v * init_point.a \
* init_point.path_point.kappa + init_point.v \
* init_point.v * init_point.path_point.dkappa
if not math.isnan(centripetal_jerk):
self.centripetal_jerk_list.append(centripetal_jerk)
# centripetal_accel
centripetal_accel = init_point.v * init_point.v \
* init_point.path_point.kappa
if not math.isnan(centripetal_accel):
self.centripetal_accel_list.append(centripetal_accel)
def get_acceleration(self):
# [1, 2) [-2, -1)
LAT_ACCEL_M_LB_P = 1
LAT_ACCEL_M_UB_P = 2
LAT_ACCEL_M_LB_N = -2
LAT_ACCEL_M_UB_N = -1
lat_accel_medium_cnt = 0
# [2, inf) [-inf,-2)
LAT_ACCEL_H_LB_P = 2
LAT_ACCEL_H_UB_N = -2
lat_accel_high_cnt = 0
for centripetal_accel in self.centripetal_accel_list:
if LAT_ACCEL_M_LB_P <= centripetal_accel < LAT_ACCEL_M_UB_P \
or LAT_ACCEL_M_LB_N < centripetal_accel <= LAT_ACCEL_M_UB_N:
lat_accel_medium_cnt += 1
if centripetal_accel >= LAT_ACCEL_H_LB_P \
or centripetal_accel <= LAT_ACCEL_H_UB_N:
lat_accel_high_cnt += 1
# centripetal_accel
lat_accel = {}
if len(self.centripetal_accel_list) > 0:
lat_accel["max"] = abs(max(self.centripetal_accel_list, key=abs))
accel_avg = np.average(np.absolute(self.centripetal_accel_list))
lat_accel["avg"] = accel_avg
else:
lat_accel["max"] = 0
lat_accel["avg"] = 0
lat_accel["medium_cnt"] = lat_accel_medium_cnt
lat_accel["high_cnt"] = lat_accel_high_cnt
return lat_accel
def get_jerk(self):
# [0.5,1) [-1, -0.5)
LAT_JERK_M_LB_P = 0.5
LAT_JERK_M_UB_P = 1
LAT_JERK_M_LB_N = -1
LAT_JERK_M_UB_N = -0.5
lat_jerk_medium_cnt = 0
# [1, inf) [-inf,-1)
LAT_JERK_H_LB_P = 1
LAT_JERK_H_UB_N = -1
lat_jerk_high_cnt = 0
for centripetal_jerk in self.centripetal_jerk_list:
if LAT_JERK_M_LB_P <= centripetal_jerk < LAT_JERK_M_UB_P \
or LAT_JERK_M_LB_N < centripetal_jerk <= LAT_JERK_M_UB_N:
lat_jerk_medium_cnt += 1
if centripetal_jerk >= LAT_JERK_H_LB_P \
or centripetal_jerk <= LAT_JERK_H_UB_N:
lat_jerk_high_cnt += 1
# centripetal_jerk
lat_jerk = {}
if len(self.centripetal_jerk_list) > 0:
lat_jerk["max"] = abs(max(self.centripetal_jerk_list, key=abs))
jerk_avg = np.average(np.absolute(self.centripetal_jerk_list))
lat_jerk["avg"] = jerk_avg
else:
lat_jerk["max"] = 0
lat_jerk["avg"] = 0
lat_jerk["medium_cnt"] = lat_jerk_medium_cnt
lat_jerk["high_cnt"] = lat_jerk_high_cnt
return lat_jerk
|
jinghaomiao/apollo
|
modules/tools/record_analyzer/metrics/lat_acceleration.py
|
Python
|
apache-2.0
| 4,081 | 35.115044 | 80 | 0.549865 | false |
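A small sketch of how the metric above could be exercised off-line. The real input is Apollo's planning ADCTrajectory protobuf; here types.SimpleNamespace stands in for the nested message fields that put() reads, and the kinematic values are made up:

# Hypothetical sketch; the import path and all numeric values are illustrative.
from types import SimpleNamespace

from lat_acceleration import LatAcceleration


def fake_trajectory(v, a, kappa, dkappa):
    """Mimic the debug.planning_data.init_point fields that put() dereferences."""
    path_point = SimpleNamespace(kappa=kappa, dkappa=dkappa)
    init_point = SimpleNamespace(v=v, a=a, path_point=path_point)
    planning_data = SimpleNamespace(init_point=init_point)
    return SimpleNamespace(debug=SimpleNamespace(planning_data=planning_data))


if __name__ == "__main__":
    metric = LatAcceleration()
    # v [m/s], a [m/s^2], kappa [1/m], dkappa [1/m^2]
    for v, a, kappa, dkappa in [(10.0, 0.5, 0.01, 0.001), (12.0, -0.2, 0.03, 0.0)]:
        metric.put(fake_trajectory(v, a, kappa, dkappa))
    print(metric.get_acceleration())  # max/avg plus medium_cnt/high_cnt buckets
    print(metric.get_jerk())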
/*
* pg_crc.h
*
* PostgreSQL CRC support
*
* See Ross Williams' excellent introduction
* A PAINLESS GUIDE TO CRC ERROR DETECTION ALGORITHMS, available from
* http://www.ross.net/crc/ or several other net sites.
*
* We use a normal (not "reflected", in Williams' terms) CRC, using initial
* all-ones register contents and a final bit inversion.
*
* The 64-bit variant is not used as of PostgreSQL 8.1, but we retain the
* code for possible future use.
*
*
* Portions Copyright (c) 1996-2010, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* $PostgreSQL: pgsql/src/include/utils/pg_crc.h,v 1.24 2010/02/26 02:01:29 momjian Exp $
*/
#ifndef PG_CRC_H
#define PG_CRC_H
/* ugly hack to let this be used in frontend and backend code on Cygwin */
#ifdef FRONTEND
#define CRCDLLIMPORT
#else
#define CRCDLLIMPORT PGDLLIMPORT
#endif
typedef uint32 pg_crc32;
/* Initialize a CRC accumulator */
#define INIT_CRC32(crc) ((crc) = 0xFFFFFFFF)
/* Finish a CRC calculation */
#define FIN_CRC32(crc) ((crc) ^= 0xFFFFFFFF)
/* Accumulate some (more) bytes into a CRC */
#define COMP_CRC32(crc, data, len) \
do { \
unsigned char *__data = (unsigned char *) (data); \
uint32 __len = (len); \
\
while (__len-- > 0) \
{ \
int __tab_index = ((int) ((crc) >> 24) ^ *__data++) & 0xFF; \
(crc) = pg_crc32_table[__tab_index] ^ ((crc) << 8); \
} \
} while (0)
/* Check for equality of two CRCs */
#define EQ_CRC32(c1,c2) ((c1) == (c2))
/* Constant table for CRC calculation */
extern CRCDLLIMPORT const uint32 pg_crc32_table[];
#ifdef PROVIDE_64BIT_CRC
/*
* If we use a 64-bit integer type, then a 64-bit CRC looks just like the
* usual sort of implementation. However, we can also fake it with two
* 32-bit registers. Experience has shown that the two-32-bit-registers code
* is as fast as, or even much faster than, the 64-bit code on all but true
* 64-bit machines. We use SIZEOF_VOID_P to check the native word width.
*/
#if SIZEOF_VOID_P < 8
/*
* crc0 represents the LSBs of the 64-bit value, crc1 the MSBs. Note that
* with crc0 placed first, the output of 32-bit and 64-bit implementations
* will be bit-compatible only on little-endian architectures. If it were
* important to make the two possible implementations bit-compatible on
* all machines, we could do a configure test to decide how to order the
* two fields, but it seems not worth the trouble.
*/
typedef struct pg_crc64
{
uint32 crc0;
uint32 crc1;
} pg_crc64;
/* Initialize a CRC accumulator */
#define INIT_CRC64(crc) ((crc).crc0 = 0xffffffff, (crc).crc1 = 0xffffffff)
/* Finish a CRC calculation */
#define FIN_CRC64(crc) ((crc).crc0 ^= 0xffffffff, (crc).crc1 ^= 0xffffffff)
/* Accumulate some (more) bytes into a CRC */
#define COMP_CRC64(crc, data, len) \
do { \
uint32 __crc0 = (crc).crc0; \
uint32 __crc1 = (crc).crc1; \
unsigned char *__data = (unsigned char *) (data); \
uint32 __len = (len); \
\
while (__len-- > 0) \
{ \
int __tab_index = ((int) (__crc1 >> 24) ^ *__data++) & 0xFF; \
__crc1 = pg_crc64_table1[__tab_index] ^ ((__crc1 << 8) | (__crc0 >> 24)); \
__crc0 = pg_crc64_table0[__tab_index] ^ (__crc0 << 8); \
} \
(crc).crc0 = __crc0; \
(crc).crc1 = __crc1; \
} while (0)
/* Check for equality of two CRCs */
#define EQ_CRC64(c1,c2) ((c1).crc0 == (c2).crc0 && (c1).crc1 == (c2).crc1)
/* Constant table for CRC calculation */
extern CRCDLLIMPORT const uint32 pg_crc64_table0[];
extern CRCDLLIMPORT const uint32 pg_crc64_table1[];
#else /* use int64 implementation */
typedef struct pg_crc64
{
uint64 crc0;
} pg_crc64;
/* Initialize a CRC accumulator */
#define INIT_CRC64(crc) ((crc).crc0 = UINT64CONST(0xffffffffffffffff))
/* Finish a CRC calculation */
#define FIN_CRC64(crc) ((crc).crc0 ^= UINT64CONST(0xffffffffffffffff))
/* Accumulate some (more) bytes into a CRC */
#define COMP_CRC64(crc, data, len) \
do { \
uint64 __crc0 = (crc).crc0; \
unsigned char *__data = (unsigned char *) (data); \
uint32 __len = (len); \
\
while (__len-- > 0) \
{ \
int __tab_index = ((int) (__crc0 >> 56) ^ *__data++) & 0xFF; \
__crc0 = pg_crc64_table[__tab_index] ^ (__crc0 << 8); \
} \
(crc).crc0 = __crc0; \
} while (0)
/* Check for equality of two CRCs */
#define EQ_CRC64(c1,c2) ((c1).crc0 == (c2).crc0)
/* Constant table for CRC calculation */
extern CRCDLLIMPORT const uint64 pg_crc64_table[];
#endif /* SIZEOF_VOID_P < 8 */
#endif /* PROVIDE_64BIT_CRC */
#endif /* PG_CRC_H */
|
bubichain/blockchain
|
src/3rd/win32/include/postgresql/server/utils/pg_crc.h
|
C
|
apache-2.0
| 4,514 | 28.697368 | 89 | 0.651086 | false |
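A minimal sketch of the three-step macro protocol above; it assumes the translation unit is compiled inside the PostgreSQL tree so that pg_crc32_table from pg_crc.c is linked in:

/* Hypothetical usage sketch; relies on postgres.h and the pg_crc.c lookup table. */
#include "postgres.h"
#include "utils/pg_crc.h"

static pg_crc32
sketch_compute_crc(const void *data, uint32 len)
{
	pg_crc32	crc;

	INIT_CRC32(crc);			/* all-ones initial register contents */
	COMP_CRC32(crc, data, len);	/* accumulate the payload bytes */
	FIN_CRC32(crc);				/* final bit inversion */

	return crc;
}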
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.traits;
import java.io.Serializable;
/**
* This class holds the resolved (as mpoints) form of a
* {@link org.apache.fop.fo.properties.LengthRangeProperty LengthRange} or
* {@link org.apache.fop.fo.properties.SpaceProperty Space} type property value. <p>
* Instances of this class are immutable. All arithmetic methods like
* {@link #plus(MinOptMax) plus}, {@link #minus(MinOptMax) minus} or {@link #mult(int)
* mult} return a different instance. So it is possible to pass around instances without
* copying. <p> <code>MinOptMax</code> values are used during layout calculations.
*/
public final class MinOptMax implements Serializable {
private static final long serialVersionUID = -4791524475122206142L;
/**
* The zero <code>MinOptMax</code> instance with <code>min == opt == max == 0</code>.
*/
public static final MinOptMax ZERO = getInstance(0);
private final int min;
private final int opt;
private final int max;
/**
* Returns an instance of <code>MinOptMax</code> with the given values.
*
* @param min the minimum value
* @param opt the optimum value
* @param max the maximum value
* @return the corresponding instance
* @throws IllegalArgumentException if <code>min > opt || max < opt</code>.
*/
public static MinOptMax getInstance(int min, int opt, int max) throws IllegalArgumentException {
if (min > opt) {
throw new IllegalArgumentException("min (" + min + ") > opt (" + opt + ")");
}
if (max < opt) {
throw new IllegalArgumentException("max (" + max + ") < opt (" + opt + ")");
}
return new MinOptMax(min, opt, max);
}
/**
* Returns an instance of <code>MinOptMax</code> with one fixed value for all three
* properties (min, opt, max).
*
* @param value the value for min, opt and max
* @return the corresponding instance
* @see #isStiff()
*/
public static MinOptMax getInstance(int value) {
return new MinOptMax(value, value, value);
}
// Private constructor without consistency checks
private MinOptMax(int min, int opt, int max) {
assert min <= opt && opt <= max;
this.min = min;
this.opt = opt;
this.max = max;
}
/**
* Returns the minimum value of this <code>MinOptMax</code>.
*
* @return the minimum value of this <code>MinOptMax</code>.
*/
public int getMin() {
return min;
}
/**
* Returns the optimum value of this <code>MinOptMax</code>.
*
* @return the optimum value of this <code>MinOptMax</code>.
*/
public int getOpt() {
return opt;
}
/**
* Returns the maximum value of this <code>MinOptMax</code>.
*
* @return the maximum value of this <code>MinOptMax</code>.
*/
public int getMax() {
return max;
}
/**
* Returns the shrinkability of this <code>MinOptMax</code> which is the absolute difference
* between <code>min</code> and <code>opt</code>.
*
* @return the shrinkability of this <code>MinOptMax</code> which is always non-negative.
*/
public int getShrink() {
return opt - min;
}
/**
* Returns the stretchability of this <code>MinOptMax</code> which is the absolute difference
* between <code>opt</code> and <code>max</code>.
*
* @return the stretchability of this <code>MinOptMax</code> which is always non-negative.
*/
public int getStretch() {
return max - opt;
}
/**
* Returns the sum of this <code>MinOptMax</code> and the given <code>MinOptMax</code>.
*
* @param operand the second operand of the sum (the first is this instance itself),
* @return the sum of this <code>MinOptMax</code> and the given <code>MinOptMax</code>.
*/
public MinOptMax plus(MinOptMax operand) {
return new MinOptMax(min + operand.min, opt + operand.opt, max + operand.max);
}
/**
* Adds the given value to all three components of this instance and returns the result.
*
* @param value value to add to the min, opt, max components
* @return the result of the addition
*/
public MinOptMax plus(int value) {
return new MinOptMax(min + value, opt + value, max + value);
}
/**
* Returns the difference of this <code>MinOptMax</code> and the given
* <code>MinOptMax</code>. This instance must be a compound of the operand and another
* <code>MinOptMax</code>, that is, there must exist a <code>MinOptMax</code> <i>m</i>
* such that <code>this.equals(m.plus(operand))</code>. In other words, the operand
     * must not have more shrink or stretch than this instance.
*
* @param operand the value to be subtracted
* @return the difference of this <code>MinOptMax</code> and the given
* <code>MinOptMax</code>.
* @throws ArithmeticException if this instance has strictly less shrink or stretch
* than the operand
*/
public MinOptMax minus(MinOptMax operand) throws ArithmeticException {
checkCompatibility(getShrink(), operand.getShrink(), "shrink");
checkCompatibility(getStretch(), operand.getStretch(), "stretch");
return new MinOptMax(min - operand.min, opt - operand.opt, max - operand.max);
}
private void checkCompatibility(int thisElasticity, int operandElasticity, String msge) {
if (thisElasticity < operandElasticity) {
throw new ArithmeticException(
"Cannot subtract a MinOptMax from another MinOptMax that has less " + msge
+ " (" + thisElasticity + " < " + operandElasticity + ")");
}
}
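    // Illustration (values are assumptions for the example only): with
    // a = MinOptMax.getInstance(10, 20, 40) (shrink 10, stretch 20) and
    // b = MinOptMax.getInstance(5, 10, 20) (shrink 5, stretch 10),
    // a.minus(b) yields MinOptMax[min = 5, opt = 10, max = 20], whereas
    // b.minus(a) throws ArithmeticException because b has less shrink and stretch than a.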
/**
* Subtracts the given value from all three components of this instance and returns the result.
*
* @param value value to subtract from the min, opt, max components
* @return the result of the subtraction
*/
public MinOptMax minus(int value) {
return new MinOptMax(min - value, opt - value, max - value);
}
/**
* Do not use, backwards compatibility only. Returns an instance with the
* given value added to the minimal value.
*
* @param minOperand the minimal value to be added.
* @return an instance with the given value added to the minimal value.
* @throws IllegalArgumentException if
* <code>min + minOperand > opt || max < opt</code>.
*/
public MinOptMax plusMin(int minOperand) throws IllegalArgumentException {
return getInstance(min + minOperand, opt, max);
}
/**
* Do not use, backwards compatibility only. Returns an instance with the
     * given value subtracted from the minimal value.
*
* @param minOperand the minimal value to be subtracted.
     * @return an instance with the given value subtracted from the minimal value.
* @throws IllegalArgumentException if
* <code>min - minOperand > opt || max < opt</code>.
*/
public MinOptMax minusMin(int minOperand) throws IllegalArgumentException {
return getInstance(min - minOperand, opt, max);
}
/**
* Do not use, backwards compatibility only. Returns an instance with the
* given value added to the maximal value.
*
* @param maxOperand the maximal value to be added.
* @return an instance with the given value added to the maximal value.
* @throws IllegalArgumentException if
     * <code>min > opt || max + maxOperand < opt</code>.
*/
public MinOptMax plusMax(int maxOperand) throws IllegalArgumentException {
return getInstance(min, opt, max + maxOperand);
}
/**
* Do not use, backwards compatibility only. Returns an instance with the
     * given value subtracted from the maximal value.
     *
     * @param maxOperand the maximal value to be subtracted.
     * @return an instance with the given value subtracted from the maximal value.
     * @throws IllegalArgumentException if
     * <code>min > opt || max - maxOperand < opt</code>.
*/
public MinOptMax minusMax(int maxOperand) throws IllegalArgumentException {
return getInstance(min, opt, max - maxOperand);
}
/**
* Returns the product of this <code>MinOptMax</code> and the given factor.
*
* @param factor the factor
* @return the product of this <code>MinOptMax</code> and the given factor
* @throws IllegalArgumentException if the factor is negative
*/
public MinOptMax mult(int factor) throws IllegalArgumentException {
if (factor < 0) {
throw new IllegalArgumentException("factor < 0; was: " + factor);
} else if (factor == 1) {
return this;
} else {
return getInstance(min * factor, opt * factor, max * factor);
}
}
/**
* Determines whether this <code>MinOptMax</code> represents a non-zero dimension, which means
* that not all values (min, opt, max) are zero.
*
* @return <code>true</code> if this <code>MinOptMax</code> represents a non-zero dimension;
* <code>false</code> otherwise.
*/
public boolean isNonZero() {
return min != 0 || max != 0;
}
/**
* Determines whether this <code>MinOptMax</code> doesn't allow for shrinking or stretching,
* which means that all values (min, opt, max) are the same.
*
     * @return <code>true</code> if this <code>MinOptMax</code> doesn't allow for shrinking
* or stretching; <code>false</code> otherwise.
* @see #isElastic()
*/
public boolean isStiff() {
return min == max;
}
/**
* Determines whether this <code>MinOptMax</code> allows for shrinking or stretching, which
* means that at least one of the min or max values isn't equal to the opt value.
*
* @return <code>true</code> if this <code>MinOptMax</code> allows for shrinking or stretching;
* <code>false</code> otherwise.
* @see #isStiff()
*/
public boolean isElastic() {
return min != opt || opt != max;
}
/**
* Extends the minimum length to the given length if necessary, and adjusts opt and max
* accordingly.
*
* @param newMin the new minimum length
* @return a <code>MinOptMax</code> instance with the minimum length extended
*/
public MinOptMax extendMinimum(int newMin) {
if (min < newMin) {
int newOpt = Math.max(newMin, opt);
int newMax = Math.max(newOpt, max);
return getInstance(newMin, newOpt, newMax);
} else {
return this;
}
}
/**
* {@inheritDoc}
*/
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
MinOptMax minOptMax = (MinOptMax) obj;
return opt == minOptMax.opt && max == minOptMax.max && min == minOptMax.min;
}
/**
* {@inheritDoc}
*/
public int hashCode() {
int result = min;
result = 31 * result + opt;
result = 31 * result + max;
return result;
}
/**
* {@inheritDoc}
*/
public String toString() {
return "MinOptMax[min = " + min + ", opt = " + opt + ", max = " + max + "]";
}
}
|
chunlinyao/fop
|
fop-core/src/main/java/org/apache/fop/traits/MinOptMax.java
|
Java
|
apache-2.0
| 12,281 | 34.700581 | 100 | 0.627473 | false |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.services.jbpm.search;
import org.jbpm.kie.services.impl.query.SqlQueryDefinition;
import org.jbpm.services.api.query.QueryService;
import org.jbpm.services.api.query.model.QueryDefinition;
import org.jbpm.services.api.query.model.QueryDefinition.Target;
import org.kie.server.api.KieServerConstants;
import org.kie.server.api.model.definition.BaseQueryFilterSpec;
import org.kie.server.api.model.definition.ProcessInstanceQueryFilterSpec;
import org.kie.server.api.model.instance.ProcessInstanceList;
import org.kie.server.services.api.KieServerRegistry;
import org.kie.server.services.impl.marshal.MarshallerHelper;
import org.kie.server.services.jbpm.search.util.ProcessInstanceQueryStrategy;
import org.kie.server.services.jbpm.search.util.QueryStrategy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ProcessInstanceSearchServiceBase extends AbstractSearchServiceBase {
    private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceSearchServiceBase.class);
private static final String MAPPER_NAME = "ProcessInstancesWithCustomVariables";
private static final String PROCESS_INSTANCE_QUERY_NAME = "getProcessInstancesWithFilters";
private MarshallerHelper marshallerHelper;
private QueryServiceTemplate queryServiceTemplate;
private QueryCallback queryCallback;
public ProcessInstanceSearchServiceBase(QueryService queryService, KieServerRegistry context) {
this.queryServiceTemplate = new QueryServiceTemplate(queryService);
this.marshallerHelper = new MarshallerHelper(context);
// Register (or replace) query.
String processInstanceQuerySource = context.getConfig().getConfigItemValue(KieServerConstants.CFG_PERSISTANCE_DS,
"java:jboss/datasources/ExampleDS");
QueryStrategy queryStrategy = new ProcessInstanceQueryStrategy();
this.queryCallback = new QueryCallback() {
@Override
public QueryStrategy getQueryStrategy() {
return queryStrategy;
}
@Override
public String getQueryName() {
return PROCESS_INSTANCE_QUERY_NAME;
}
@Override
public String getMapperName() {
return MAPPER_NAME;
}
};
QueryDefinition queryDefinition = new SqlQueryDefinition(PROCESS_INSTANCE_QUERY_NAME, processInstanceQuerySource, Target.CUSTOM);
queryDefinition.setExpression(queryStrategy.getQueryExpression());
queryService.replaceQuery(queryDefinition);
}
public ProcessInstanceList getProcessInstancesWithFilters(Integer page, Integer pageSize, String payload, String marshallingType) {
RequestCallback reqCallback = new RequestCallback() {
@Override
public BaseQueryFilterSpec getQueryFilterSpec() {
return marshallerHelper.unmarshal(payload, marshallingType, ProcessInstanceQueryFilterSpec.class);
}
};
return queryServiceTemplate.getWithFilters(page, pageSize, queryCallback, reqCallback);
}
}
|
markcoble/droolsjbpm-integration
|
kie-server-parent/kie-server-services/kie-server-services-jbpm-search/src/main/java/org/kie/server/services/jbpm/search/ProcessInstanceSearchServiceBase.java
|
Java
|
apache-2.0
| 3,458 | 35.4 | 132 | 0.79439 | false |
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use prelude::v1::*;
use ascii::*;
use collections::HashMap;
use collections;
use env;
use ffi::{OsString, OsStr};
use fmt;
use fs;
use io::{self, Error};
use libc::{self, c_void};
use os::windows::ffi::OsStrExt;
use ptr;
use sync::{StaticMutex, MUTEX_INIT};
use sys::handle::Handle;
use sys::pipe2::AnonPipe;
use sys::{self, cvt};
use sys_common::{AsInner, FromInner};
////////////////////////////////////////////////////////////////////////////////
// Command
////////////////////////////////////////////////////////////////////////////////
fn mk_key(s: &OsStr) -> OsString {
FromInner::from_inner(sys::os_str::Buf {
inner: s.as_inner().inner.to_ascii_uppercase()
})
}
#[derive(Clone)]
pub struct Command {
pub program: OsString,
pub args: Vec<OsString>,
pub env: Option<HashMap<OsString, OsString>>,
pub cwd: Option<OsString>,
pub detach: bool, // not currently exposed in std::process
}
impl Command {
pub fn new(program: &OsStr) -> Command {
Command {
program: program.to_os_string(),
args: Vec::new(),
env: None,
cwd: None,
detach: false,
}
}
pub fn arg(&mut self, arg: &OsStr) {
self.args.push(arg.to_os_string())
}
pub fn args<'a, I: Iterator<Item = &'a OsStr>>(&mut self, args: I) {
self.args.extend(args.map(OsStr::to_os_string))
}
fn init_env_map(&mut self){
if self.env.is_none() {
self.env = Some(env::vars_os().map(|(key, val)| {
(mk_key(&key), val)
}).collect());
}
}
pub fn env(&mut self, key: &OsStr, val: &OsStr) {
self.init_env_map();
self.env.as_mut().unwrap().insert(mk_key(key), val.to_os_string());
}
pub fn env_remove(&mut self, key: &OsStr) {
self.init_env_map();
self.env.as_mut().unwrap().remove(&mk_key(key));
}
pub fn env_clear(&mut self) {
self.env = Some(HashMap::new())
}
pub fn cwd(&mut self, dir: &OsStr) {
self.cwd = Some(dir.to_os_string())
}
}
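// Illustrative usage sketch (not part of the original module; this is internal
// plumbing behind std::process, so real callers normally go through
// std::process::Command). Program and argument values are assumptions:
//
//     let mut cmd = Command::new(OsStr::from_str("notepad.exe"));
//     cmd.arg(OsStr::from_str("readme.txt"));
//     let child = try!(Process::spawn(&cmd, Stdio::None, Stdio::Inherit, Stdio::Inherit));
//     let _status = try!(child.wait());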
////////////////////////////////////////////////////////////////////////////////
// Processes
////////////////////////////////////////////////////////////////////////////////
// `CreateProcess` is racy!
// http://support.microsoft.com/kb/315939
static CREATE_PROCESS_LOCK: StaticMutex = MUTEX_INIT;
/// A value representing a child process.
///
/// The lifetime of this value is linked to the lifetime of the actual
/// process - the Process destructor calls self.finish() which waits
/// for the process to terminate.
pub struct Process {
/// A HANDLE to the process, which will prevent the pid being
/// re-used until the handle is closed.
handle: Handle,
}
pub enum Stdio {
Inherit,
Piped(AnonPipe),
None,
}
impl Process {
#[allow(deprecated)]
pub fn spawn(cfg: &Command,
in_fd: Stdio,
out_fd: Stdio,
err_fd: Stdio) -> io::Result<Process>
{
use libc::types::os::arch::extra::{DWORD, HANDLE, STARTUPINFO};
use libc::consts::os::extra::{
TRUE, FALSE,
STARTF_USESTDHANDLES,
INVALID_HANDLE_VALUE,
DUPLICATE_SAME_ACCESS
};
use libc::funcs::extra::kernel32::{
GetCurrentProcess,
DuplicateHandle,
CloseHandle,
CreateProcessW
};
use env::split_paths;
use mem;
use iter::Iterator;
// To have the spawning semantics of unix/windows stay the same, we need to
// read the *child's* PATH if one is provided. See #15149 for more details.
let program = cfg.env.as_ref().and_then(|env| {
for (key, v) in env {
if OsStr::from_str("PATH") != &**key { continue }
// Split the value and test each path to see if the
// program exists.
for path in split_paths(&v) {
let path = path.join(cfg.program.to_str().unwrap())
.with_extension(env::consts::EXE_EXTENSION);
if fs::metadata(&path).is_ok() {
return Some(path.into_os_string())
}
}
break
}
None
});
unsafe {
let mut si = zeroed_startupinfo();
si.cb = mem::size_of::<STARTUPINFO>() as DWORD;
si.dwFlags = STARTF_USESTDHANDLES;
let cur_proc = GetCurrentProcess();
let set_fd = |fd: &Stdio, slot: &mut HANDLE,
is_stdin: bool| {
match *fd {
Stdio::Inherit => {}
// Similarly to unix, we don't actually leave holes for the
// stdio file descriptors, but rather open up /dev/null
// equivalents. These equivalents are drawn from libuv's
// windows process spawning.
Stdio::None => {
let access = if is_stdin {
libc::FILE_GENERIC_READ
} else {
libc::FILE_GENERIC_WRITE | libc::FILE_READ_ATTRIBUTES
};
let size = mem::size_of::<libc::SECURITY_ATTRIBUTES>();
let mut sa = libc::SECURITY_ATTRIBUTES {
nLength: size as libc::DWORD,
lpSecurityDescriptor: ptr::null_mut(),
bInheritHandle: 1,
};
let mut filename: Vec<u16> = "NUL".utf16_units().collect();
filename.push(0);
*slot = libc::CreateFileW(filename.as_ptr(),
access,
libc::FILE_SHARE_READ |
libc::FILE_SHARE_WRITE,
&mut sa,
libc::OPEN_EXISTING,
0,
ptr::null_mut());
if *slot == INVALID_HANDLE_VALUE {
return Err(Error::last_os_error())
}
}
Stdio::Piped(ref pipe) => {
let orig = pipe.raw();
if DuplicateHandle(cur_proc, orig, cur_proc, slot,
0, TRUE, DUPLICATE_SAME_ACCESS) == FALSE {
return Err(Error::last_os_error())
}
}
}
Ok(())
};
try!(set_fd(&in_fd, &mut si.hStdInput, true));
try!(set_fd(&out_fd, &mut si.hStdOutput, false));
try!(set_fd(&err_fd, &mut si.hStdError, false));
let mut cmd_str = make_command_line(program.as_ref().unwrap_or(&cfg.program),
&cfg.args);
cmd_str.push(0); // add null terminator
let mut pi = zeroed_process_information();
let mut create_err = None;
// stolen from the libuv code.
let mut flags = libc::CREATE_UNICODE_ENVIRONMENT;
if cfg.detach {
flags |= libc::DETACHED_PROCESS | libc::CREATE_NEW_PROCESS_GROUP;
}
with_envp(cfg.env.as_ref(), |envp| {
with_dirp(cfg.cwd.as_ref(), |dirp| {
let _lock = CREATE_PROCESS_LOCK.lock().unwrap();
let created = CreateProcessW(ptr::null(),
cmd_str.as_mut_ptr(),
ptr::null_mut(),
ptr::null_mut(),
TRUE,
flags, envp, dirp,
&mut si, &mut pi);
if created == FALSE {
create_err = Some(Error::last_os_error());
}
})
});
if !in_fd.inherited() {
assert!(CloseHandle(si.hStdInput) != 0);
}
if !out_fd.inherited() {
assert!(CloseHandle(si.hStdOutput) != 0);
}
if !err_fd.inherited() {
assert!(CloseHandle(si.hStdError) != 0);
}
match create_err {
Some(err) => return Err(err),
None => {}
}
// We close the thread handle because we don't care about keeping the
// thread id valid, and we aren't keeping the thread handle around to be
// able to close it later. We don't close the process handle however
            // because we want the process id to stay valid at least until the
// calling code closes the process handle.
assert!(CloseHandle(pi.hThread) != 0);
Ok(Process {
handle: Handle::new(pi.hProcess)
})
}
}
pub unsafe fn kill(&self) -> io::Result<()> {
try!(cvt(libc::TerminateProcess(self.handle.raw(), 1)));
Ok(())
}
pub fn wait(&self) -> io::Result<ExitStatus> {
use libc::consts::os::extra::{
FALSE,
STILL_ACTIVE,
INFINITE,
WAIT_OBJECT_0,
};
use libc::funcs::extra::kernel32::{
GetExitCodeProcess,
WaitForSingleObject,
};
unsafe {
loop {
let mut status = 0;
if GetExitCodeProcess(self.handle.raw(), &mut status) == FALSE {
let err = Err(Error::last_os_error());
return err;
}
if status != STILL_ACTIVE {
return Ok(ExitStatus(status as i32));
}
match WaitForSingleObject(self.handle.raw(), INFINITE) {
WAIT_OBJECT_0 => {}
_ => {
let err = Err(Error::last_os_error());
return err
}
}
}
}
}
}
impl Stdio {
fn inherited(&self) -> bool {
match *self { Stdio::Inherit => true, _ => false }
}
}
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub struct ExitStatus(i32);
impl ExitStatus {
pub fn success(&self) -> bool {
self.0 == 0
}
pub fn code(&self) -> Option<i32> {
Some(self.0)
}
}
impl fmt::Display for ExitStatus {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "exit code: {}", self.0)
}
}
fn zeroed_startupinfo() -> libc::types::os::arch::extra::STARTUPINFO {
libc::types::os::arch::extra::STARTUPINFO {
cb: 0,
lpReserved: ptr::null_mut(),
lpDesktop: ptr::null_mut(),
lpTitle: ptr::null_mut(),
dwX: 0,
dwY: 0,
dwXSize: 0,
dwYSize: 0,
dwXCountChars: 0,
dwYCountCharts: 0,
dwFillAttribute: 0,
dwFlags: 0,
wShowWindow: 0,
cbReserved2: 0,
lpReserved2: ptr::null_mut(),
hStdInput: libc::INVALID_HANDLE_VALUE,
hStdOutput: libc::INVALID_HANDLE_VALUE,
hStdError: libc::INVALID_HANDLE_VALUE,
}
}
fn zeroed_process_information() -> libc::types::os::arch::extra::PROCESS_INFORMATION {
libc::types::os::arch::extra::PROCESS_INFORMATION {
hProcess: ptr::null_mut(),
hThread: ptr::null_mut(),
dwProcessId: 0,
dwThreadId: 0
}
}
// Produces a wide string *without terminating null*
fn make_command_line(prog: &OsStr, args: &[OsString]) -> Vec<u16> {
let mut cmd: Vec<u16> = Vec::new();
append_arg(&mut cmd, prog);
for arg in args {
cmd.push(' ' as u16);
append_arg(&mut cmd, arg);
}
return cmd;
fn append_arg(cmd: &mut Vec<u16>, arg: &OsStr) {
// If an argument has 0 characters then we need to quote it to ensure
// that it actually gets passed through on the command line or otherwise
// it will be dropped entirely when parsed on the other end.
let arg_bytes = &arg.as_inner().inner.as_inner();
let quote = arg_bytes.iter().any(|c| *c == b' ' || *c == b'\t')
|| arg_bytes.len() == 0;
if quote {
cmd.push('"' as u16);
}
let mut iter = arg.encode_wide();
while let Some(x) = iter.next() {
if x == '"' as u16 {
// escape quotes
cmd.push('\\' as u16);
cmd.push('"' as u16);
} else if x == '\\' as u16 {
// is this a run of backslashes followed by a " ?
if iter.clone().skip_while(|y| *y == '\\' as u16).next() == Some('"' as u16) {
// Double it ... NOTE: this behavior is being
// preserved as it's been part of Rust for a long
// time, but no one seems to know exactly why this
// is the right thing to do.
cmd.push('\\' as u16);
cmd.push('\\' as u16);
} else {
// Push it through unescaped
cmd.push('\\' as u16);
}
} else {
cmd.push(x)
}
}
if quote {
cmd.push('"' as u16);
}
}
}
fn with_envp<F, T>(env: Option<&collections::HashMap<OsString, OsString>>, cb: F) -> T
where F: FnOnce(*mut c_void) -> T,
{
// On Windows we pass an "environment block" which is not a char**, but
// rather a concatenation of null-terminated k=v\0 sequences, with a final
// \0 to terminate.
match env {
Some(env) => {
let mut blk = Vec::new();
for pair in env {
blk.extend(pair.0.encode_wide());
blk.push('=' as u16);
blk.extend(pair.1.encode_wide());
blk.push(0);
}
blk.push(0);
cb(blk.as_mut_ptr() as *mut c_void)
}
_ => cb(ptr::null_mut())
}
}
fn with_dirp<T, F>(d: Option<&OsString>, cb: F) -> T where
F: FnOnce(*const u16) -> T,
{
match d {
Some(dir) => {
let mut dir_str: Vec<u16> = dir.encode_wide().collect();
dir_str.push(0);
cb(dir_str.as_ptr())
},
None => cb(ptr::null())
}
}
#[cfg(test)]
mod tests {
use prelude::v1::*;
use str;
use ffi::{OsStr, OsString};
use super::make_command_line;
#[test]
fn test_make_command_line() {
fn test_wrapper(prog: &str, args: &[&str]) -> String {
String::from_utf16(
&make_command_line(OsStr::from_str(prog),
&args.iter()
.map(|a| OsString::from(a))
.collect::<Vec<OsString>>())).unwrap()
}
assert_eq!(
test_wrapper("prog", &["aaa", "bbb", "ccc"]),
"prog aaa bbb ccc"
);
assert_eq!(
test_wrapper("C:\\Program Files\\blah\\blah.exe", &["aaa"]),
"\"C:\\Program Files\\blah\\blah.exe\" aaa"
);
assert_eq!(
test_wrapper("C:\\Program Files\\test", &["aa\"bb"]),
"\"C:\\Program Files\\test\" aa\\\"bb"
);
assert_eq!(
test_wrapper("echo", &["a b c"]),
"echo \"a b c\""
);
assert_eq!(
test_wrapper("\u{03c0}\u{042f}\u{97f3}\u{00e6}\u{221e}", &[]),
"\u{03c0}\u{042f}\u{97f3}\u{00e6}\u{221e}"
);
}
}
|
avdi/rust
|
src/libstd/sys/windows/process2.rs
|
Rust
|
apache-2.0
| 16,543 | 32.487854 | 94 | 0.457837 | false |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.opsworks.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import java.math.*;
import java.nio.ByteBuffer;
import com.amazonaws.services.opsworks.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* RaidArray JSON Unmarshaller
*/
public class RaidArrayJsonUnmarshaller implements
Unmarshaller<RaidArray, JsonUnmarshallerContext> {
public RaidArray unmarshall(JsonUnmarshallerContext context)
throws Exception {
RaidArray raidArray = new RaidArray();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL)
return null;
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("RaidArrayId", targetDepth)) {
context.nextToken();
raidArray.setRaidArrayId(context.getUnmarshaller(
String.class).unmarshall(context));
}
if (context.testExpression("InstanceId", targetDepth)) {
context.nextToken();
raidArray.setInstanceId(context.getUnmarshaller(
String.class).unmarshall(context));
}
if (context.testExpression("Name", targetDepth)) {
context.nextToken();
raidArray.setName(context.getUnmarshaller(String.class)
.unmarshall(context));
}
if (context.testExpression("RaidLevel", targetDepth)) {
context.nextToken();
raidArray.setRaidLevel(context.getUnmarshaller(
Integer.class).unmarshall(context));
}
if (context.testExpression("NumberOfDisks", targetDepth)) {
context.nextToken();
raidArray.setNumberOfDisks(context.getUnmarshaller(
Integer.class).unmarshall(context));
}
if (context.testExpression("Size", targetDepth)) {
context.nextToken();
raidArray.setSize(context.getUnmarshaller(Integer.class)
.unmarshall(context));
}
if (context.testExpression("Device", targetDepth)) {
context.nextToken();
raidArray.setDevice(context.getUnmarshaller(String.class)
.unmarshall(context));
}
if (context.testExpression("MountPoint", targetDepth)) {
context.nextToken();
raidArray.setMountPoint(context.getUnmarshaller(
String.class).unmarshall(context));
}
if (context.testExpression("AvailabilityZone", targetDepth)) {
context.nextToken();
raidArray.setAvailabilityZone(context.getUnmarshaller(
String.class).unmarshall(context));
}
if (context.testExpression("CreatedAt", targetDepth)) {
context.nextToken();
raidArray.setCreatedAt(context
.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("StackId", targetDepth)) {
context.nextToken();
raidArray.setStackId(context.getUnmarshaller(String.class)
.unmarshall(context));
}
if (context.testExpression("VolumeType", targetDepth)) {
context.nextToken();
raidArray.setVolumeType(context.getUnmarshaller(
String.class).unmarshall(context));
}
if (context.testExpression("Iops", targetDepth)) {
context.nextToken();
raidArray.setIops(context.getUnmarshaller(Integer.class)
.unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null
|| context.getLastParsedParentElement().equals(
currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return raidArray;
}
private static RaidArrayJsonUnmarshaller instance;
public static RaidArrayJsonUnmarshaller getInstance() {
if (instance == null)
instance = new RaidArrayJsonUnmarshaller();
return instance;
}
}
|
flofreud/aws-sdk-java
|
aws-java-sdk-opsworks/src/main/java/com/amazonaws/services/opsworks/model/transform/RaidArrayJsonUnmarshaller.java
|
Java
|
apache-2.0
| 5,908 | 40.605634 | 80 | 0.558057 | false |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.util;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.CoderException;
import com.google.cloud.dataflow.sdk.runners.worker.windmill.Windmill;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.cloud.dataflow.sdk.values.CodedTupleTag;
import com.google.cloud.dataflow.sdk.values.TimestampedValue;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.base.Throwables;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.protobuf.ByteString;
import org.joda.time.Instant;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
/**
* A write-back cache for the tag and tag list state computed during a given stage.
*
* <p>This does not synchronize changes across multiple threads or multiple workers.
*/
class KeyedStateCache {
private static final Predicate<TagListUpdates<?>> IS_DELETE_TAG_LIST =
new Predicate<TagListUpdates<?>>() {
@Override
public boolean apply(TagListUpdates<?> input) {
return input.isDelete;
}
};
private final LoadingCache<CodedTupleTag<?>, Optional<?>> tagCache;
private final Map<CodedTupleTag<?>, KeyedStateCache.TagUpdates<?>> localTagUpdates =
new LinkedHashMap<>();
private final LoadingCache<CodedTupleTag<?>, List<?>> tagListCache;
private final Map<CodedTupleTag<?>, KeyedStateCache.TagListUpdates<?>> localTagListUpdates =
new LinkedHashMap<>();
private String tagPrefix;
public KeyedStateCache(String tagPrefix,
LoadingCache<CodedTupleTag<?>, Optional<?>> tagCache,
LoadingCache<CodedTupleTag<?>, List<?>> tagListCache) {
this.tagPrefix = tagPrefix;
this.tagCache = tagCache;
this.tagListCache = tagListCache;
}
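  // Illustrative call sequence (a sketch only; variable names are assumptions, not part of this
  // class): buffered writes win over the backing caches until flushTo() pushes them to Windmill.
  //   cache.store(tag, value, timestamp);              // buffer a tag write locally
  //   cache.writeToTagList(listTag, item, timestamp);  // buffer a tag-list append locally
  //   cache.lookupTags(tags);                          // local updates take precedence
  //   cache.flushTo(commitRequestBuilder);             // push changes and clear all caches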
private <T> KeyedStateCache.TagUpdates<T> getOrCreateTagUpdate(CodedTupleTag<T> tag) {
@SuppressWarnings("unchecked")
KeyedStateCache.TagUpdates<T> update = (KeyedStateCache.TagUpdates<T>) localTagUpdates.get(tag);
if (update == null) {
update = new KeyedStateCache.TagUpdates<>();
localTagUpdates.put(tag, update);
}
return update;
}
private <T> KeyedStateCache.TagListUpdates<T> getOrCreateTagListUpdate(CodedTupleTag<T> tag) {
@SuppressWarnings("unchecked")
KeyedStateCache.TagListUpdates<T> update =
(KeyedStateCache.TagListUpdates<T>) localTagListUpdates.get(tag);
if (update == null) {
update = new KeyedStateCache.TagListUpdates<>();
localTagListUpdates.put(tag, update);
}
return update;
}
public void removeTags(CodedTupleTag<?>... tags) {
for (CodedTupleTag<?> tag : tags) {
getOrCreateTagUpdate(tag).markRemoved();
}
}
public <T> void store(CodedTupleTag<T> tag, T value, Instant timestamp) {
getOrCreateTagUpdate(tag).set(value, timestamp);
}
public Map<CodedTupleTag<?>, Object> lookupTags(Iterable<? extends CodedTupleTag<?>> tags)
throws IOException {
try {
ImmutableMap.Builder<CodedTupleTag<?>, Object> outputBuilder = ImmutableMap.builder();
// Figure out which tags can be fully satisfied with local data, and add them to the output.
// Other tags, will need to be looked up.
List<CodedTupleTag<?>> nonLocalTags = new ArrayList<>();
for (CodedTupleTag<?> tag : tags) {
TagUpdates<?> localUpdates = localTagUpdates.get(tag);
if (localUpdates != null) {
          // ImmutableMap can't hold null, so we just skip putting the value in if it's null.
if (localUpdates.getUpdatedValue() != null) {
outputBuilder.put(tag, localUpdates.getUpdatedValue());
}
} else {
nonLocalTags.add(tag);
}
}
for (Map.Entry<CodedTupleTag<?>, Optional<?>> entry
: tagCache.getAll(nonLocalTags).entrySet()) {
if (entry.getValue().isPresent()) {
outputBuilder.put(entry.getKey(), entry.getValue().get());
}
}
return outputBuilder.build();
} catch (ExecutionException e) {
Throwables.propagateIfInstanceOf(e.getCause(), IOException.class);
throw Throwables.propagate(e.getCause());
}
}
public void removeTagLists(CodedTupleTag<?>... tagLists) {
for (CodedTupleTag<?> tagList : tagLists) {
getOrCreateTagListUpdate(tagList).markRemoved();
}
}
public <T> void writeToTagList(CodedTupleTag<T> tag, T value, Instant timestamp) {
getOrCreateTagListUpdate(tag).add(value, timestamp);
}
public Map<CodedTupleTag<?>, Iterable<?>> readTagLists(Iterable<CodedTupleTag<?>> tags)
throws IOException {
try {
ImmutableMap.Builder<CodedTupleTag<?>, Iterable<?>> outputBuilder = ImmutableMap.builder();
// Figure out which tags can be fully satisfied with local data, and add them to the output.
// Other tags, will need to be looked up.
List<CodedTupleTag<?>> nonDeletedTags = new ArrayList<>();
for (CodedTupleTag<?> tag : tags) {
TagListUpdates<?> localUpdates = localTagListUpdates.get(tag);
if (localUpdates != null && localUpdates.isDelete) {
// For locally deleted items, we don't need to do a lookup at all
outputBuilder.put(tag, localUpdates.getAddedItems());
} else {
nonDeletedTags.add(tag);
}
}
      // For any non-deleted tag, look it up in the tagListCache, and build the output by
      // combining the cached contents with any local updates.
ImmutableMap<CodedTupleTag<?>, List<?>> cachedContents = tagListCache.getAll(nonDeletedTags);
for (Map.Entry<CodedTupleTag<?>, List<?>> lookedUp : cachedContents.entrySet()) {
CodedTupleTag<?> tag = lookedUp.getKey();
TagListUpdates<?> localUpdates = localTagListUpdates.get(tag);
outputBuilder.put(tag, localUpdates == null
? lookedUp.getValue() : localUpdates.mergeWith(lookedUp.getValue()));
}
return outputBuilder.build();
} catch (ExecutionException e) {
Throwables.propagateIfInstanceOf(e.getCause(), IOException.class);
throw Throwables.propagate(e.getCause());
}
}
public void flushTo(Windmill.WorkItemCommitRequest.Builder outputBuilder) throws IOException {
// Make sure that we've done lookups for the tag-writes, tag-deletes, and tag-list-deletes.
try {
tagCache.getAll(localTagUpdates.keySet());
tagListCache.getAll(Maps.filterValues(localTagListUpdates, IS_DELETE_TAG_LIST).keySet());
} catch (ExecutionException e) {
Throwables.propagateIfInstanceOf(e.getCause(), IOException.class);
throw Throwables.propagate(e.getCause());
}
// Flush the local tag and tag list updates to the commit request
for (Map.Entry<CodedTupleTag<?>, TagUpdates<?>> update : localTagUpdates.entrySet()) {
update.getValue().flushTo(update.getKey(), outputBuilder);
}
for (Map.Entry<CodedTupleTag<?>, TagListUpdates<?>> update : localTagListUpdates.entrySet()) {
update.getValue().flushTo(update.getKey(), outputBuilder);
}
// Clear the caches and local updates
tagCache.invalidateAll();
tagListCache.invalidateAll();
localTagUpdates.clear();
localTagListUpdates.clear();
}
private ByteString serializeTag(CodedTupleTag<?> tag) {
return ByteString.copyFromUtf8(tagPrefix + tag.getId());
}
private class TagUpdates<T> {
private T updatedValue;
private Instant updatedTimestamp;
boolean removed;
private void set(T newValue, Instant newTimestamp) {
removed = false;
updatedTimestamp = newTimestamp;
updatedValue = newValue;
}
public T getUpdatedValue() {
return updatedValue;
}
private void markRemoved() {
removed = true;
updatedTimestamp = BoundedWindow.TIMESTAMP_MAX_VALUE;
updatedValue = null;
}
private void flushTo(
CodedTupleTag<?> wildcardTag, Windmill.WorkItemCommitRequest.Builder outputBuilder)
throws CoderException, IOException {
Windmill.Value.Builder valueBuilder = outputBuilder.addValueUpdatesBuilder()
.setTag(serializeTag(wildcardTag))
.getValueBuilder();
if (removed) {
valueBuilder
.setTimestamp(Long.MAX_VALUE)
.setData(ByteString.EMPTY);
} else {
@SuppressWarnings("unchecked")
CodedTupleTag<T> tag = (CodedTupleTag<T>) wildcardTag;
ByteString.Output stream = ByteString.newOutput();
tag.getCoder().encode(updatedValue, stream, Coder.Context.OUTER);
valueBuilder
.setTimestamp(TimeUnit.MILLISECONDS.toMicros(updatedTimestamp.getMillis()))
.setData(stream.toByteString());
}
}
}
private class TagListUpdates<T> {
boolean isDelete = false;
List<TimestampedValue<T>> added = new ArrayList<>();
private void markRemoved() {
isDelete = true;
added.clear();
}
private void add(T value, Instant timestamp) {
added.add(TimestampedValue.of(value, timestamp));
}
private List<T> getAddedItems() {
List<T> addedItems = Lists.newArrayList();
for (TimestampedValue<T> item : added) {
addedItems.add(item.getValue());
}
return addedItems;
}
public List<T> mergeWith(List<?> wildcardValue) {
@SuppressWarnings("unchecked")
List<T> value = (List<T>) wildcardValue;
List<T> addedItems = getAddedItems();
List<T> all = new ArrayList<>(wildcardValue.size() + addedItems.size());
all.addAll(value);
all.addAll(addedItems);
return Collections.unmodifiableList(all);
}
private void flushTo(
CodedTupleTag<?> wildcardTag, Windmill.WorkItemCommitRequest.Builder outputBuilder)
throws IOException {
// First do the delete, if necessary and there were previously elements
try {
if (isDelete && tagListCache.get(wildcardTag).size() > 0) {
outputBuilder.addListUpdatesBuilder()
.setTag(serializeTag(wildcardTag))
.setEndTimestamp(Long.MAX_VALUE);
}
} catch (ExecutionException e) {
Throwables.propagateIfInstanceOf(e.getCause(), IOException.class);
throw Throwables.propagate(e.getCause());
}
// Then, add all the elements
if (added.size() > 0) {
@SuppressWarnings("unchecked")
CodedTupleTag<T> tag = (CodedTupleTag<T>) wildcardTag;
Windmill.TagList.Builder listBuilder = outputBuilder.addListUpdatesBuilder()
.setTag(serializeTag(wildcardTag));
for (TimestampedValue<T> value : added) {
ByteString.Output stream = ByteString.newOutput();
// Windmill does not support empty data for tag list state; prepend a zero byte.
byte[] zero = {0x0};
stream.write(zero);
// Encode the value
tag.getCoder().encode(value.getValue(), stream, Coder.Context.OUTER);
listBuilder.addValuesBuilder()
.setData(stream.toByteString())
.setTimestamp(TimeUnit.MILLISECONDS.toMicros(value.getTimestamp().getMillis()));
}
}
}
}
}
|
springml/DataflowJavaSDK
|
sdk/src/main/java/com/google/cloud/dataflow/sdk/util/KeyedStateCache.java
|
Java
|
apache-2.0
| 12,066 | 35.01791 | 100 | 0.679015 | false |
<?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/videointelligence/v1/video_intelligence.proto
namespace Google\Cloud\VideoIntelligence\V1;
use Google\Protobuf\Internal\GPBType;
use Google\Protobuf\Internal\RepeatedField;
use Google\Protobuf\Internal\GPBUtil;
/**
* A generic detected attribute represented by name in string format.
*
* Generated from protobuf message <code>google.cloud.videointelligence.v1.DetectedAttribute</code>
*/
class DetectedAttribute extends \Google\Protobuf\Internal\Message
{
/**
* The name of the attribute, for example, glasses, dark_glasses, mouth_open.
* A full list of supported type names will be provided in the document.
*
* Generated from protobuf field <code>string name = 1;</code>
*/
private $name = '';
/**
* Detected attribute confidence. Range [0, 1].
*
* Generated from protobuf field <code>float confidence = 2;</code>
*/
private $confidence = 0.0;
/**
* Text value of the detection result. For example, the value for "HairColor"
* can be "black", "blonde", etc.
*
* Generated from protobuf field <code>string value = 3;</code>
*/
private $value = '';
/**
* Constructor.
*
* @param array $data {
* Optional. Data for populating the Message object.
*
* @type string $name
* The name of the attribute, for example, glasses, dark_glasses, mouth_open.
* A full list of supported type names will be provided in the document.
* @type float $confidence
* Detected attribute confidence. Range [0, 1].
* @type string $value
* Text value of the detection result. For example, the value for "HairColor"
* can be "black", "blonde", etc.
* }
*/
public function __construct($data = NULL) {
\GPBMetadata\Google\Cloud\Videointelligence\V1\VideoIntelligence::initOnce();
parent::__construct($data);
}
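    // Illustrative usage sketch (values are assumptions for the example only):
    //   $attribute = new DetectedAttribute([
    //       'name' => 'glasses',
    //       'confidence' => 0.9,
    //       'value' => 'sunglasses',
    //   ]);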
/**
* The name of the attribute, for example, glasses, dark_glasses, mouth_open.
* A full list of supported type names will be provided in the document.
*
* Generated from protobuf field <code>string name = 1;</code>
* @return string
*/
public function getName()
{
return $this->name;
}
/**
* The name of the attribute, for example, glasses, dark_glasses, mouth_open.
* A full list of supported type names will be provided in the document.
*
* Generated from protobuf field <code>string name = 1;</code>
* @param string $var
* @return $this
*/
public function setName($var)
{
GPBUtil::checkString($var, True);
$this->name = $var;
return $this;
}
/**
* Detected attribute confidence. Range [0, 1].
*
* Generated from protobuf field <code>float confidence = 2;</code>
* @return float
*/
public function getConfidence()
{
return $this->confidence;
}
/**
* Detected attribute confidence. Range [0, 1].
*
* Generated from protobuf field <code>float confidence = 2;</code>
* @param float $var
* @return $this
*/
public function setConfidence($var)
{
GPBUtil::checkFloat($var);
$this->confidence = $var;
return $this;
}
/**
* Text value of the detection result. For example, the value for "HairColor"
* can be "black", "blonde", etc.
*
* Generated from protobuf field <code>string value = 3;</code>
* @return string
*/
public function getValue()
{
return $this->value;
}
/**
* Text value of the detection result. For example, the value for "HairColor"
* can be "black", "blonde", etc.
*
* Generated from protobuf field <code>string value = 3;</code>
* @param string $var
* @return $this
*/
public function setValue($var)
{
GPBUtil::checkString($var, True);
$this->value = $var;
return $this;
}
}
|
googleapis/google-cloud-php-videointelligence
|
src/V1/DetectedAttribute.php
|
PHP
|
apache-2.0
| 4,117 | 27.79021 | 99 | 0.604081 | false |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import com.google.common.primitives.Longs
import org.apache.accumulo.core.client.IteratorSetting
import org.geotools.factory.Hints
import org.locationtech.geomesa.accumulo.index.AccumuloFeatureIndex
import org.locationtech.geomesa.accumulo.index.legacy.z2.Z2IndexV1
import org.locationtech.geomesa.curve.LegacyZ2SFC
import org.locationtech.geomesa.index.iterators.DensityScan.DensityResult
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.sfcurve.zorder.Z2
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
/**
* Density iterator that weights hits based on z2 schema
*/
class Z2DensityIterator extends KryoLazyDensityIterator {
// TODO GEOMESA-1164 shares a lot of code with Z3DensityIter
import Z2DensityIterator.TableSharingKey
val zBytes = Array.fill[Byte](8)(0)
override protected def initResult(sft: SimpleFeatureType,
transform: Option[SimpleFeatureType],
options: Map[String, String]): DensityResult = {
val result = super.initResult(sft, transform, options)
if (sft.nonPoints) {
// normalize the weight based on how many representations of the geometry are in our index
// this is stored in the column qualifier
val normalizeWeight: (Double) => Double = (weight) => {
val hexCount = topKey.getColumnQualifier.toString
val hexSeparator = hexCount.indexOf(",")
if (hexSeparator == -1) {
weight
} else {
weight / Integer.parseInt(hexCount.substring(0, hexSeparator), 16)
}
}
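      // Illustration (the qualifier layout is inferred from the parsing above): a column
      // qualifier starting with "3," means the geometry produced 0x3 = 3 index entries, so a
      // hit of weight 1.0 contributes 1.0 / 3; one starting with "a," divides by 0xa = 10.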
val baseWeight = getWeight
getWeight = (sf) => normalizeWeight(baseWeight(sf))
// 1 for split plus optional 1 for table sharing
val zPrefix = if (options(TableSharingKey).toBoolean) { 2 } else { 1 }
writeGeom = (_, weight, result) => {
val row = topKey.getRowData
val zOffset = row.offset() + zPrefix
var k = 0
while (k < Z2IndexV1.GEOM_Z_NUM_BYTES) {
zBytes(k) = row.byteAt(zOffset + k)
k += 1
}
val (x, y) = LegacyZ2SFC.invert(Z2(Longs.fromByteArray(zBytes)))
val i = gridSnap.i(x)
val j = gridSnap.j(y)
if (i != -1 && j != -1) {
result(i, j) += weight
}
}
}
result
}
}
object Z2DensityIterator {
val TableSharingKey = "ts"
/**
* Creates an iterator config for the z2 density iterator
*/
def configure(sft: SimpleFeatureType,
index: AccumuloFeatureIndex,
filter: Option[Filter],
hints: Hints,
priority: Int = KryoLazyDensityIterator.DEFAULT_PRIORITY): IteratorSetting = {
val is = KryoLazyDensityIterator.configure(sft, index, filter, hints, deduplicate = false, priority)
is.setIteratorClass(classOf[Z2DensityIterator].getName)
is.addOption(TableSharingKey, sft.isTableSharing.toString)
is
}
}
|
ddseapy/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/Z2DensityIterator.scala
|
Scala
|
apache-2.0
| 3,529 | 36.956989 | 104 | 0.650893 | false |
/*
Copyright 2019 The OpenEBS Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package jiva
import "flag"
var (
// KubeConfigPath is the path to
// the kubeconfig provided at runtime
KubeConfigPath string
// ReplicaCount is the value of
// replica count provided at runtime
ReplicaCount int
// ReplicaLabel is the label for replica pods
ReplicaLabel = "openebs.io/replica=jiva-replica"
// CtrlLabel is the label for controller pod
CtrlLabel = "openebs.io/controller=jiva-controller"
)
// ParseFlags gets the flag values at run time
func ParseFlags() {
flag.StringVar(&KubeConfigPath, "kubeconfig", "", "path to kubeconfig to invoke kubernetes API calls")
flag.IntVar(&ReplicaCount, "replicas", 1, "value of replica count")
}
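// Illustrative usage sketch (assumed caller, not part of this file): register the flags,
// then let the standard library parse the command line.
//
//	jiva.ParseFlags()
//	flag.Parse()
//	// KubeConfigPath and ReplicaCount now hold the values passed via -kubeconfig and -replicas.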
|
AmitKumarDas/maya
|
tests/jiva/jiva.go
|
GO
|
apache-2.0
| 1,225 | 34 | 103 | 0.770612 | false |
/*
* Copyright (C) 2017 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.drift.transport.apache;
import com.google.common.collect.ImmutableMap;
import com.google.common.net.HostAndPort;
import io.airlift.drift.transport.apache.client.ApacheThriftConnectionFactoryConfig;
import org.testng.annotations.Test;
import java.util.Map;
import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping;
import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults;
import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults;
public class TestApacheThriftConnectionFactoryConfig
{
@Test
public void testDefaults()
{
assertRecordedDefaults(recordDefaults(ApacheThriftConnectionFactoryConfig.class)
.setThreadCount(null)
.setSocksProxy(null));
}
@Test
public void testExplicitPropertyMappings()
{
Map<String, String> properties = new ImmutableMap.Builder<String, String>()
.put("thrift.client.thread-count", "99")
.put("thrift.client.socks-proxy", "example.com:9876")
.build();
ApacheThriftConnectionFactoryConfig expected = new ApacheThriftConnectionFactoryConfig()
.setThreadCount(99)
.setSocksProxy(HostAndPort.fromParts("example.com", 9876));
assertFullMapping(properties, expected);
}
}
|
electrum/drift
|
drift-transport-apache/src/test/java/io/airlift/drift/transport/apache/TestApacheThriftConnectionFactoryConfig.java
|
Java
|
apache-2.0
| 1,975 | 36.264151 | 96 | 0.728101 | false |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.iosmoe;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.audio.Music;
import com.badlogic.gdx.backends.iosmoe.objectal.OALAudioTrack;
import apple.avfoundation.AVAudioPlayer;
import apple.avfoundation.protocol.AVAudioPlayerDelegate;
public class IOSMusic implements Music {
private final OALAudioTrack track;
OnCompletionListener onCompletionListener;
public IOSMusic (OALAudioTrack track) {
this.track = track;
AVAudioPlayerDelegate delegate = new AVAudioPlayerDelegate() {
@Override
public void audioPlayerDidFinishPlayingSuccessfully (AVAudioPlayer player, boolean flag) {
final OnCompletionListener listener = onCompletionListener;
if (onCompletionListener != null) {
Gdx.app.postRunnable(new Runnable() {
@Override
public void run () {
listener.onCompletion(IOSMusic.this);
}
});
}
}
};
this.track.setDelegate(delegate);
}
@Override
public void play () {
if (track.paused()) {
track.setPaused(false);
} else if (!track.playing()) {
track.play();
}
}
@Override
public void pause () {
if (track.playing()) {
track.setPaused(true);
}
}
@Override
public void stop () {
track.stop();
}
@Override
public boolean isPlaying () {
return track.playing() && !track.paused();
}
@Override
public void setLooping (boolean isLooping) {
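		// AVAudioPlayer/ObjectAL convention: a loop count of -1 repeats indefinitely, 0 plays the track once.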
track.setNumberOfLoops(isLooping ? -1 : 0);
}
@Override
public boolean isLooping () {
return track.numberOfLoops() == -1;
}
@Override
public void setVolume (float volume) {
track.setVolume(volume);
}
@Override
public void setPosition (float position) {
track.setCurrentTime(position);
}
@Override
public float getPosition () {
return (float)(track.currentTime());
}
@Override
public void dispose () {
track.clear();
track.dealloc();
}
@Override
public float getVolume () {
return track.volume();
}
@Override
public void setPan (float pan, float volume) {
track.setPan(pan);
track.setVolume(volume);
}
@Override
public void setOnCompletionListener (OnCompletionListener listener) {
this.onCompletionListener = listener;
}
}
|
xoppa/libgdx
|
backends/gdx-backend-moe/src/com/badlogic/gdx/backends/iosmoe/IOSMusic.java
|
Java
|
apache-2.0
| 3,034 | 22.868852 | 93 | 0.64733 | false |
////////////////////////////////////////////////////////////////////////////////
// AggregateException
var ss_AggregateException = function#? DEBUG AggregateException$##(message, innerExceptions) {
if (typeof(message) !== 'string') {
innerExceptions = message;
message = 'One or more errors occurred.';
}
innerExceptions = ss.isValue(innerExceptions) ? ss.arrayFromEnumerable(innerExceptions) : null;
ss_Exception.call(this, message, innerExceptions && innerExceptions.length ? innerExceptions[0] : null);
this._innerExceptions = innerExceptions;
};
ss_AggregateException.prototype = {
get_innerExceptions: function#? DEBUG AggregateException$get_innerExceptions##() {
return this._innerExceptions;
}
};
ss.registerClass(global, 'ss.AggregateException', ss_AggregateException, ss_Exception);
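// Illustrative usage sketch (variable names are assumptions for the example only):
//   var agg = new ss_AggregateException('One or more uploads failed', [errorA, errorB]);
//   agg.get_innerExceptions();           // -> [errorA, errorB]
//   new ss_AggregateException([errorA]); // message defaults to 'One or more errors occurred.'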
|
drysart/SaltarelleCompiler
|
Runtime/CoreLib.Script/AggregateException.js
|
JavaScript
|
apache-2.0
| 810 | 41.526316 | 105 | 0.686881 | false |
// @flow
import * as React from 'react'
import cx from 'classnames'
import { Icon } from '../icons'
import styles from './forms.css'
import type { HoverTooltipHandlers } from '../tooltips'
export type FormGroupProps = {|
/** text label */
label?: string,
/** form content */
children?: React.Node,
/** classes to apply */
className?: ?string,
  /** if included, the FormGroup label will use the error style. The content of the string is ignored. */
error?: ?string,
/** enable disabled style. Overridden by truthy `error` */
disabled?: ?boolean,
/** handlers for HoverTooltipComponent */
hoverTooltipHandlers?: ?HoverTooltipHandlers,
|}
export function FormGroup(props: FormGroupProps): React.Node {
const error = props.error != null
const className = cx(props.className, {
[styles.error]: error,
[styles.disabled]: !error && props.disabled,
})
return (
<div className={className}>
{props.label && (
<div
{...props.hoverTooltipHandlers}
className={styles.form_group_label}
>
{error && (
<div className={styles.error_icon}>
<Icon name="alert" />
</div>
)}
{props.label}
</div>
)}
{props.children}
</div>
)
}
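// Illustrative usage sketch (prop values are assumptions for the example only):
//   <FormGroup label="Volume (uL)" error={volumeError} disabled={formDisabled}>
//     <input name="volume" />
//   </FormGroup>
// A non-null `error` switches the label to the error style and renders the alert icon.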
|
Opentrons/labware
|
components/src/forms/FormGroup.js
|
JavaScript
|
apache-2.0
| 1,283 | 25.729167 | 100 | 0.600935 | false |
# gatsby-transformer-sharp
Creates `ImageSharp` nodes from image types that are supported by the
[Sharp](https://github.com/lovell/sharp) image processing library and provides
fields in their GraphQL types for processing your images in a variety of ways
including resizing, cropping, and creating responsive images.
[Live demo](https://image-processing.gatsbyjs.org/)
([source](https://github.com/gatsbyjs/gatsby/tree/master/examples/image-processing))
## Install
`npm install --save gatsby-transformer-sharp gatsby-plugin-sharp`
## How to use
```javascript
// In your gatsby-config.js
module.exports = {
plugins: [`gatsby-plugin-sharp`, `gatsby-transformer-sharp`],
}
```
Please note that you must have a source plugin (which brings in images) installed in your project. Otherwise, no `ImageSharp` nodes can be created for your files. Examples would be [`gatsby-source-filesystem`](/packages/gatsby-source-filesystem) or source plugins for (headless) CMSs like [`gatsby-source-wordpress`](/packages/gatsby-source-wordpress).
**Note**: An exception to this is when using [`gatsby-source-contentful`](/packages/gatsby-source-contentful/), as the source plugin and the assets are not [downloaded to the local filesystem](https://www.gatsbyjs.org/packages/gatsby-source-contentful/#download-assets-for-static-distribution). By default, the `gatsby-source-contentful` plugin creates a `ContentfulAsset` node for every image with links to Contentful’s CDN, therefore it is not necessary to use `gatsby-transformer-sharp` together with `gatsby-source-contentful`.
## Parsing algorithm
It recognizes files with the following extensions as images.
- jpeg
- jpg
- png
- webp
- tif
- tiff
Each image file is parsed into a node of type `ImageSharp`.
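As a rough sketch of what this enables, a page component can then query a file's `childImageSharp` field. The example below assumes the classic `fluid` fields added by `gatsby-plugin-sharp`, a `gatsby-source-filesystem` setup, and an image at `src/images/example.jpg` — all of these names are illustrative assumptions, so check your site's GraphiQL explorer for the exact schema:
```javascript
// Illustrative sketch only — the file path, field names, and maxWidth are assumptions
import React from "react"
import { graphql } from "gatsby"
export default function ExampleImage({ data }) {
  // childImageSharp is the node created by gatsby-transformer-sharp
  const { fluid } = data.file.childImageSharp
  return <img src={fluid.src} srcSet={fluid.srcSet} sizes={fluid.sizes} alt="Example" />
}
export const query = graphql`
  query {
    file(relativePath: { eq: "images/example.jpg" }) {
      childImageSharp {
        fluid(maxWidth: 800) {
          src
          srcSet
          sizes
        }
      }
    }
  }
`
```
In practice most sites pass the `fluid` (or `fixed`) object to `gatsby-image`'s `Img` component rather than a raw `<img>` tag.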
## Configuration options
`checkSupportedExtensions` [boolean][optional]
Sharp only supports certain image formats (see the Parsing algorithm section above) and therefore throws a warning when you use an unsupported format such as a `.gif` in an `ImageSharp` query; for those files you'll need to use `publicURL` instead. With this option you can disable that warning.
```javascript
// In your gatsby-config.js
module.exports = {
plugins: [
`gatsby-plugin-sharp`,
{
resolve: `gatsby-transformer-sharp`,
options: {
// The option defaults to true
checkSupportedExtensions: false,
},
},
],
}
```
## Troubleshooting
### Incompatible library version: sharp.node requires version X or later, but Z provides version Y
This means that there are multiple incompatible versions of the `sharp` package installed in `node_modules`. The complete error typically looks like this:
```text
Something went wrong installing the "sharp" module
dlopen(/Users/misiek/dev/gatsby-starter-blog/node_modules/sharp/build/Release/sharp.node, 1): Library not loaded: @rpath/libglib-2.0.dylib
Referenced from: /Users/misiek/dev/gatsby-starter-blog/node_modules/sharp/build/Release/sharp.node
Reason: Incompatible library version: sharp.node requires version 6001.0.0 or later, but libglib-2.0.dylib provides version 5801.0.0
```
To fix this, you'll need to update all Gatsby plugins in the current project that depend on the `sharp` package. Here's a list of official plugins that you might need to update in case your project uses them:
- `gatsby-plugin-sharp`
- `gatsby-plugin-manifest`
- `gatsby-remark-images-contentful`
- `gatsby-source-contentful`
- `gatsby-transformer-sharp`
- `gatsby-transformer-sqip`
To update these packages, run:
```shell
npm install gatsby-plugin-sharp gatsby-plugin-manifest gatsby-remark-images-contentful gatsby-source-contentful gatsby-transformer-sharp gatsby-transformer-sqip
```
If updating these doesn't fix the issue, your project probably uses other plugins from the community that depend on a different version of `sharp`. Try running `npm list sharp` or `yarn why sharp` to see all packages in the current project that use `sharp` and try updating them as well.
|
BigBoss424/portfolio
|
v8/development/node_modules/gatsby-transformer-sharp/README.md
|
Markdown
|
apache-2.0
| 3,959 | 42.01087 | 531 | 0.765479 | false |
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* Error Connect
*
* Copyright 2015 Armin Novak <[email protected]>
* Copyright 2015 Thincast Technologies GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <stdio.h>
#include <freerdp/log.h>
#include "errinfo.h"
#define TAG FREERDP_TAG("core")
#define ERRCONNECT_DEFINE(_code) { ERRCONNECT_##_code , "ERRCONNECT_" #_code , ERRCONNECT_##_code##_STRING }
/* Protocol-independent codes */
#define ERRCONNECT_PRE_CONNECT_FAILED_STRING \
"A configuration error prevented a connection to be established."
#define ERRCONNECT_CONNECT_UNDEFINED_STRING \
"A undefined connection error occurred."
#define ERRCONNECT_POST_CONNECT_FAILED_STRING \
"The connection attempt was aborted due to post connect configuration errors."
#define ERRCONNECT_DNS_ERROR_STRING \
"The DNS entry could not be resolved."
#define ERRCONNECT_DNS_NAME_NOT_FOUND_STRING \
"The DNS host name was not found."
#define ERRCONNECT_CONNECT_FAILED_STRING \
"The connection failed."
#define ERRCONNECT_MCS_CONNECT_INITIAL_ERROR_STRING \
"The connection failed at initial MCS connect"
#define ERRCONNECT_TLS_CONNECT_FAILED_STRING \
"The connection failed at TLS connect."
#define ERRCONNECT_AUTHENTICATION_FAILED_STRING \
"An authentication failure aborted the connection."
#define ERRCONNECT_INSUFFICIENT_PRIVILEGES_STRING \
"Insufficient privileges to establish a connection."
#define ERRCONNECT_CONNECT_CANCELLED_STRING \
"The connection was cancelled."
#define ERRCONNECT_SECURITY_NEGO_CONNECT_FAILED_STRING \
"The connection failed at negotiating security settings."
#define ERRCONNECT_CONNECT_TRANSPORT_FAILED_STRING \
"The connection transport layer failed."
/* Special codes */
#define ERRCONNECT_SUCCESS_STRING "Success."
#define ERRCONNECT_NONE_STRING ""
static const ERRINFO ERRCONNECT_CODES[] =
{
ERRCONNECT_DEFINE(SUCCESS),
ERRCONNECT_DEFINE(PRE_CONNECT_FAILED),
ERRCONNECT_DEFINE(CONNECT_UNDEFINED),
ERRCONNECT_DEFINE(POST_CONNECT_FAILED),
ERRCONNECT_DEFINE(DNS_ERROR),
ERRCONNECT_DEFINE(DNS_NAME_NOT_FOUND),
ERRCONNECT_DEFINE(CONNECT_FAILED),
ERRCONNECT_DEFINE(MCS_CONNECT_INITIAL_ERROR),
ERRCONNECT_DEFINE(TLS_CONNECT_FAILED),
ERRCONNECT_DEFINE(AUTHENTICATION_FAILED),
ERRCONNECT_DEFINE(INSUFFICIENT_PRIVILEGES),
ERRCONNECT_DEFINE(CONNECT_CANCELLED),
ERRCONNECT_DEFINE(SECURITY_NEGO_CONNECT_FAILED),
ERRCONNECT_DEFINE(CONNECT_TRANSPORT_FAILED),
ERRCONNECT_DEFINE(NONE)
};
const char* freerdp_get_error_connect_string(UINT32 code)
{
const ERRINFO* errInfo;
errInfo = &ERRCONNECT_CODES[0];
while (errInfo->code != ERRCONNECT_NONE)
{
if (code == errInfo->code)
{
return errInfo->info;
}
errInfo++;
}
return "ERRCONNECT_UNKNOWN";
}
const char* freerdp_get_error_connect_name(UINT32 code)
{
const ERRINFO* errInfo;
errInfo = &ERRCONNECT_CODES[0];
while (errInfo->code != ERRCONNECT_NONE)
{
if (code == errInfo->code)
{
return errInfo->name;
}
errInfo++;
}
return "ERRCONNECT_UNKNOWN";
}
|
xproax/FreeRDP
|
libfreerdp/core/errconnect.c
|
C
|
apache-2.0
| 3,621 | 25.23913 | 108 | 0.747031 | false |
import React from 'react';
import { CloudProviderRegistry } from './CloudProviderRegistry';
export interface ICloudProviderLabelProps {
provider: string;
}
export interface ICloudProviderLabelState {
label: string;
}
export class CloudProviderLabel extends React.Component<ICloudProviderLabelProps, ICloudProviderLabelState> {
constructor(props: ICloudProviderLabelProps) {
super(props);
this.state = {
label: this.getProviderLabel(props.provider),
};
}
private getProviderLabel(provider: string): string {
return CloudProviderRegistry.getValue(provider, 'name') || provider;
}
public componentWillReceiveProps(nextProps: ICloudProviderLabelProps): void {
this.setState({
label: this.getProviderLabel(nextProps.provider),
});
}
public render() {
return <span>{this.state.label}</span>;
}
}
|
spinnaker/deck
|
packages/core/src/cloudProvider/CloudProviderLabel.tsx
|
TypeScript
|
apache-2.0
| 857 | 24.205882 | 109 | 0.735123 | false |
/* This file was generated by upbc (the upb compiler) from the input
* file:
*
* envoy/config/core/v3/base.proto
*
* Do not edit -- your changes will be discarded when the file is
* regenerated. */
#include <stddef.h>
#include "upb/msg_internal.h"
#include "envoy/config/core/v3/base.upb.h"
#include "envoy/config/core/v3/address.upb.h"
#include "envoy/config/core/v3/backoff.upb.h"
#include "envoy/config/core/v3/http_uri.upb.h"
#include "envoy/type/v3/percent.upb.h"
#include "envoy/type/v3/semantic_version.upb.h"
#include "google/protobuf/any.upb.h"
#include "google/protobuf/struct.upb.h"
#include "google/protobuf/wrappers.upb.h"
#include "xds/core/v3/context_params.upb.h"
#include "envoy/annotations/deprecation.upb.h"
#include "udpa/annotations/migrate.upb.h"
#include "udpa/annotations/status.upb.h"
#include "udpa/annotations/versioning.upb.h"
#include "validate/validate.upb.h"
#include "upb/port_def.inc"
static const upb_MiniTable_Field envoy_config_core_v3_Locality__fields[3] = {
{1, UPB_SIZE(0, 0), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{2, UPB_SIZE(8, 16), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{3, UPB_SIZE(16, 32), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_Locality_msginit = {
NULL,
&envoy_config_core_v3_Locality__fields[0],
UPB_SIZE(24, 48), 3, upb_ExtMode_NonExtendable, 3, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_BuildVersion_submsgs[2] = {
{.submsg = &envoy_type_v3_SemanticVersion_msginit},
{.submsg = &google_protobuf_Struct_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_BuildVersion__fields[2] = {
{1, UPB_SIZE(4, 8), 1, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{2, UPB_SIZE(8, 16), 2, 1, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_BuildVersion_msginit = {
&envoy_config_core_v3_BuildVersion_submsgs[0],
&envoy_config_core_v3_BuildVersion__fields[0],
UPB_SIZE(16, 24), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_Extension_submsgs[1] = {
{.submsg = &envoy_config_core_v3_BuildVersion_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_Extension__fields[5] = {
{1, UPB_SIZE(4, 8), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{2, UPB_SIZE(12, 24), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{3, UPB_SIZE(20, 40), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{4, UPB_SIZE(28, 56), 1, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{5, UPB_SIZE(1, 1), 0, 0, 8, kUpb_FieldMode_Scalar | (upb_FieldRep_1Byte << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_Extension_msginit = {
&envoy_config_core_v3_Extension_submsgs[0],
&envoy_config_core_v3_Extension__fields[0],
UPB_SIZE(32, 64), 5, upb_ExtMode_NonExtendable, 5, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_Node_submsgs[6] = {
{.submsg = &envoy_config_core_v3_Address_msginit},
{.submsg = &envoy_config_core_v3_BuildVersion_msginit},
{.submsg = &envoy_config_core_v3_Extension_msginit},
{.submsg = &envoy_config_core_v3_Locality_msginit},
{.submsg = &envoy_config_core_v3_Node_DynamicParametersEntry_msginit},
{.submsg = &google_protobuf_Struct_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_Node__fields[11] = {
{1, UPB_SIZE(4, 8), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{2, UPB_SIZE(12, 24), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{3, UPB_SIZE(28, 56), 1, 5, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{4, UPB_SIZE(32, 64), 2, 3, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{6, UPB_SIZE(20, 40), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{7, UPB_SIZE(52, 104), UPB_SIZE(-61, -121), 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{8, UPB_SIZE(52, 104), UPB_SIZE(-61, -121), 1, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{9, UPB_SIZE(36, 72), 0, 2, 11, kUpb_FieldMode_Array | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{10, UPB_SIZE(40, 80), 0, 0, 9, kUpb_FieldMode_Array | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{11, UPB_SIZE(44, 88), 0, 0, 11, kUpb_FieldMode_Array | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{12, UPB_SIZE(48, 96), 0, 4, 11, kUpb_FieldMode_Map | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_Node_msginit = {
&envoy_config_core_v3_Node_submsgs[0],
&envoy_config_core_v3_Node__fields[0],
UPB_SIZE(64, 128), 11, upb_ExtMode_NonExtendable, 4, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_Node_DynamicParametersEntry_submsgs[1] = {
{.submsg = &xds_core_v3_ContextParams_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_Node_DynamicParametersEntry__fields[2] = {
{1, UPB_SIZE(0, 0), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{2, UPB_SIZE(8, 16), 0, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_Node_DynamicParametersEntry_msginit = {
&envoy_config_core_v3_Node_DynamicParametersEntry_submsgs[0],
&envoy_config_core_v3_Node_DynamicParametersEntry__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_Metadata_submsgs[2] = {
{.submsg = &envoy_config_core_v3_Metadata_FilterMetadataEntry_msginit},
{.submsg = &envoy_config_core_v3_Metadata_TypedFilterMetadataEntry_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_Metadata__fields[2] = {
{1, UPB_SIZE(0, 0), 0, 0, 11, kUpb_FieldMode_Map | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{2, UPB_SIZE(4, 8), 0, 1, 11, kUpb_FieldMode_Map | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_Metadata_msginit = {
&envoy_config_core_v3_Metadata_submsgs[0],
&envoy_config_core_v3_Metadata__fields[0],
UPB_SIZE(8, 16), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_Metadata_FilterMetadataEntry_submsgs[1] = {
{.submsg = &google_protobuf_Struct_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_Metadata_FilterMetadataEntry__fields[2] = {
{1, UPB_SIZE(0, 0), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{2, UPB_SIZE(8, 16), 0, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_Metadata_FilterMetadataEntry_msginit = {
&envoy_config_core_v3_Metadata_FilterMetadataEntry_submsgs[0],
&envoy_config_core_v3_Metadata_FilterMetadataEntry__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_Metadata_TypedFilterMetadataEntry_submsgs[1] = {
{.submsg = &google_protobuf_Any_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_Metadata_TypedFilterMetadataEntry__fields[2] = {
{1, UPB_SIZE(0, 0), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{2, UPB_SIZE(8, 16), 0, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_Metadata_TypedFilterMetadataEntry_msginit = {
&envoy_config_core_v3_Metadata_TypedFilterMetadataEntry_submsgs[0],
&envoy_config_core_v3_Metadata_TypedFilterMetadataEntry__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Field envoy_config_core_v3_RuntimeUInt32__fields[2] = {
{2, UPB_SIZE(0, 0), 0, 0, 13, kUpb_FieldMode_Scalar | (upb_FieldRep_4Byte << upb_FieldRep_Shift)},
{3, UPB_SIZE(4, 8), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_RuntimeUInt32_msginit = {
NULL,
&envoy_config_core_v3_RuntimeUInt32__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 0, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_RuntimePercent_submsgs[1] = {
{.submsg = &envoy_type_v3_Percent_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_RuntimePercent__fields[2] = {
{1, UPB_SIZE(12, 24), 1, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{2, UPB_SIZE(4, 8), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_RuntimePercent_msginit = {
&envoy_config_core_v3_RuntimePercent_submsgs[0],
&envoy_config_core_v3_RuntimePercent__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Field envoy_config_core_v3_RuntimeDouble__fields[2] = {
{1, UPB_SIZE(0, 0), 0, 0, 1, kUpb_FieldMode_Scalar | (upb_FieldRep_8Byte << upb_FieldRep_Shift)},
{2, UPB_SIZE(8, 8), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_RuntimeDouble_msginit = {
NULL,
&envoy_config_core_v3_RuntimeDouble__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_RuntimeFeatureFlag_submsgs[1] = {
{.submsg = &google_protobuf_BoolValue_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_RuntimeFeatureFlag__fields[2] = {
{1, UPB_SIZE(12, 24), 1, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{2, UPB_SIZE(4, 8), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_RuntimeFeatureFlag_msginit = {
&envoy_config_core_v3_RuntimeFeatureFlag_submsgs[0],
&envoy_config_core_v3_RuntimeFeatureFlag__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Field envoy_config_core_v3_QueryParameter__fields[2] = {
{1, UPB_SIZE(0, 0), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{2, UPB_SIZE(8, 16), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_QueryParameter_msginit = {
NULL,
&envoy_config_core_v3_QueryParameter__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Field envoy_config_core_v3_HeaderValue__fields[2] = {
{1, UPB_SIZE(0, 0), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{2, UPB_SIZE(8, 16), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_HeaderValue_msginit = {
NULL,
&envoy_config_core_v3_HeaderValue__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_HeaderValueOption_submsgs[2] = {
{.submsg = &envoy_config_core_v3_HeaderValue_msginit},
{.submsg = &google_protobuf_BoolValue_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_HeaderValueOption__fields[3] = {
{1, UPB_SIZE(8, 8), 1, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{2, UPB_SIZE(12, 16), 2, 1, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{3, UPB_SIZE(4, 4), 0, 0, 5, kUpb_FieldMode_Scalar | (upb_FieldRep_4Byte << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_HeaderValueOption_msginit = {
&envoy_config_core_v3_HeaderValueOption_submsgs[0],
&envoy_config_core_v3_HeaderValueOption__fields[0],
UPB_SIZE(16, 24), 3, upb_ExtMode_NonExtendable, 3, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_HeaderMap_submsgs[1] = {
{.submsg = &envoy_config_core_v3_HeaderValue_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_HeaderMap__fields[1] = {
{1, UPB_SIZE(0, 0), 0, 0, 11, kUpb_FieldMode_Array | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_HeaderMap_msginit = {
&envoy_config_core_v3_HeaderMap_submsgs[0],
&envoy_config_core_v3_HeaderMap__fields[0],
UPB_SIZE(8, 8), 1, upb_ExtMode_NonExtendable, 1, 255, 0,
};
static const upb_MiniTable_Field envoy_config_core_v3_WatchedDirectory__fields[1] = {
{1, UPB_SIZE(0, 0), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_WatchedDirectory_msginit = {
NULL,
&envoy_config_core_v3_WatchedDirectory__fields[0],
UPB_SIZE(8, 16), 1, upb_ExtMode_NonExtendable, 1, 255, 0,
};
static const upb_MiniTable_Field envoy_config_core_v3_DataSource__fields[4] = {
{1, UPB_SIZE(0, 0), UPB_SIZE(-9, -17), 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{2, UPB_SIZE(0, 0), UPB_SIZE(-9, -17), 0, 12, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{3, UPB_SIZE(0, 0), UPB_SIZE(-9, -17), 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{4, UPB_SIZE(0, 0), UPB_SIZE(-9, -17), 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_DataSource_msginit = {
NULL,
&envoy_config_core_v3_DataSource__fields[0],
UPB_SIZE(16, 32), 4, upb_ExtMode_NonExtendable, 4, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_RetryPolicy_submsgs[2] = {
{.submsg = &envoy_config_core_v3_BackoffStrategy_msginit},
{.submsg = &google_protobuf_UInt32Value_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_RetryPolicy__fields[2] = {
{1, UPB_SIZE(4, 8), 1, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{2, UPB_SIZE(8, 16), 2, 1, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_RetryPolicy_msginit = {
&envoy_config_core_v3_RetryPolicy_submsgs[0],
&envoy_config_core_v3_RetryPolicy__fields[0],
UPB_SIZE(16, 24), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_RemoteDataSource_submsgs[2] = {
{.submsg = &envoy_config_core_v3_HttpUri_msginit},
{.submsg = &envoy_config_core_v3_RetryPolicy_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_RemoteDataSource__fields[3] = {
{1, UPB_SIZE(12, 24), 1, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{2, UPB_SIZE(4, 8), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{3, UPB_SIZE(16, 32), 2, 1, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_RemoteDataSource_msginit = {
&envoy_config_core_v3_RemoteDataSource_submsgs[0],
&envoy_config_core_v3_RemoteDataSource__fields[0],
UPB_SIZE(24, 48), 3, upb_ExtMode_NonExtendable, 3, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_AsyncDataSource_submsgs[2] = {
{.submsg = &envoy_config_core_v3_DataSource_msginit},
{.submsg = &envoy_config_core_v3_RemoteDataSource_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_AsyncDataSource__fields[2] = {
{1, UPB_SIZE(0, 0), UPB_SIZE(-5, -9), 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{2, UPB_SIZE(0, 0), UPB_SIZE(-5, -9), 1, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_AsyncDataSource_msginit = {
&envoy_config_core_v3_AsyncDataSource_submsgs[0],
&envoy_config_core_v3_AsyncDataSource__fields[0],
UPB_SIZE(8, 16), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_TransportSocket_submsgs[1] = {
{.submsg = &google_protobuf_Any_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_TransportSocket__fields[2] = {
{1, UPB_SIZE(0, 0), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
{3, UPB_SIZE(8, 16), UPB_SIZE(-13, -25), 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_TransportSocket_msginit = {
&envoy_config_core_v3_TransportSocket_submsgs[0],
&envoy_config_core_v3_TransportSocket__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 1, 255, 0,
};
static const upb_MiniTable_Sub envoy_config_core_v3_RuntimeFractionalPercent_submsgs[1] = {
{.submsg = &envoy_type_v3_FractionalPercent_msginit},
};
static const upb_MiniTable_Field envoy_config_core_v3_RuntimeFractionalPercent__fields[2] = {
{1, UPB_SIZE(12, 24), 1, 0, 11, kUpb_FieldMode_Scalar | (upb_FieldRep_Pointer << upb_FieldRep_Shift)},
{2, UPB_SIZE(4, 8), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_RuntimeFractionalPercent_msginit = {
&envoy_config_core_v3_RuntimeFractionalPercent_submsgs[0],
&envoy_config_core_v3_RuntimeFractionalPercent__fields[0],
UPB_SIZE(16, 32), 2, upb_ExtMode_NonExtendable, 2, 255, 0,
};
static const upb_MiniTable_Field envoy_config_core_v3_ControlPlane__fields[1] = {
{1, UPB_SIZE(0, 0), 0, 0, 9, kUpb_FieldMode_Scalar | (upb_FieldRep_StringView << upb_FieldRep_Shift)},
};
const upb_MiniTable envoy_config_core_v3_ControlPlane_msginit = {
NULL,
&envoy_config_core_v3_ControlPlane__fields[0],
UPB_SIZE(8, 16), 1, upb_ExtMode_NonExtendable, 1, 255, 0,
};
static const upb_MiniTable *messages_layout[24] = {
&envoy_config_core_v3_Locality_msginit,
&envoy_config_core_v3_BuildVersion_msginit,
&envoy_config_core_v3_Extension_msginit,
&envoy_config_core_v3_Node_msginit,
&envoy_config_core_v3_Node_DynamicParametersEntry_msginit,
&envoy_config_core_v3_Metadata_msginit,
&envoy_config_core_v3_Metadata_FilterMetadataEntry_msginit,
&envoy_config_core_v3_Metadata_TypedFilterMetadataEntry_msginit,
&envoy_config_core_v3_RuntimeUInt32_msginit,
&envoy_config_core_v3_RuntimePercent_msginit,
&envoy_config_core_v3_RuntimeDouble_msginit,
&envoy_config_core_v3_RuntimeFeatureFlag_msginit,
&envoy_config_core_v3_QueryParameter_msginit,
&envoy_config_core_v3_HeaderValue_msginit,
&envoy_config_core_v3_HeaderValueOption_msginit,
&envoy_config_core_v3_HeaderMap_msginit,
&envoy_config_core_v3_WatchedDirectory_msginit,
&envoy_config_core_v3_DataSource_msginit,
&envoy_config_core_v3_RetryPolicy_msginit,
&envoy_config_core_v3_RemoteDataSource_msginit,
&envoy_config_core_v3_AsyncDataSource_msginit,
&envoy_config_core_v3_TransportSocket_msginit,
&envoy_config_core_v3_RuntimeFractionalPercent_msginit,
&envoy_config_core_v3_ControlPlane_msginit,
};
const upb_MiniTable_File envoy_config_core_v3_base_proto_upb_file_layout = {
messages_layout,
NULL,
NULL,
24,
0,
0,
};
#include "upb/port_undef.inc"
|
grpc/grpc
|
src/core/ext/upb-generated/envoy/config/core/v3/base.upb.c
|
C
|
apache-2.0
| 19,356 | 45.195704 | 125 | 0.711097 | false |
/*
* Copyright (C) 2007 Júlio Vilmar Gesser.
*
* This file is part of Java 1.5 parser and Abstract Syntax Tree.
*
* Java 1.5 parser and Abstract Syntax Tree is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Java 1.5 parser and Abstract Syntax Tree is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Java 1.5 parser and Abstract Syntax Tree. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Created on 03/11/2006
*/
package japa.parser.ast.stmt;
import japa.parser.ast.Node;
/**
* @author Julio Vilmar Gesser
*/
public abstract class Statement extends Node {
public Statement() {
}
public Statement(final int beginLine, final int beginColumn, final int endLine, final int endColumn) {
super(beginLine, beginColumn, endLine, endColumn);
}
}
|
ftomassetti/effectivejava
|
test-resources/sample-codebases/javaparser/japa/parser/ast/stmt/Statement.java
|
Java
|
apache-2.0
| 1,281 | 31.684211 | 103 | 0.714844 | false |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.zip.GZIPInputStream;
/**
* Utilities for {@link Source}.
*/
public abstract class Sources {
private Sources() {}
public static Source of(File file) {
return new FileSource(file);
}
public static Source of(URL url) {
return new FileSource(url);
}
public static Source file(File baseDirectory, String fileName) {
final File file = new File(fileName);
if (baseDirectory != null && !file.isAbsolute()) {
return of(new File(baseDirectory, fileName));
} else {
return of(file);
}
}
public static Source url(String url) {
try {
return of(new URL(url));
} catch (MalformedURLException | IllegalArgumentException e) {
throw new RuntimeException("Malformed URL: '" + url + "'", e);
}
}
/** Looks for a suffix on a path and returns
* either the path with the suffix removed
* or null. */
private static String trimOrNull(String s, String suffix) {
return s.endsWith(suffix)
? s.substring(0, s.length() - suffix.length())
: null;
}
private static boolean isFile(Source source) {
return source.protocol().equals("file");
}
/** Implementation of {@link Source}. */
private static class FileSource implements Source {
private final File file;
private final URL url;
private FileSource(URL url) {
this.url = Objects.requireNonNull(url);
this.file = urlToFile(url);
}
private FileSource(File file) {
this.file = Objects.requireNonNull(file);
this.url = null;
}
private File urlToFile(URL url) {
if (!"file".equals(url.getProtocol())) {
return null;
}
URI uri;
try {
uri = url.toURI();
} catch (URISyntaxException e) {
throw new IllegalArgumentException("Unable to convert URL " + url + " to URI", e);
}
if (uri.isOpaque()) {
// It is like file:test%20file.c++
// getSchemeSpecificPart would return "test file.c++"
return new File(uri.getSchemeSpecificPart());
}
// See https://stackoverflow.com/a/17870390/1261287
return Paths.get(uri).toFile();
}
@Override public String toString() {
return (url != null ? url : file).toString();
}
public URL url() {
if (url == null) {
throw new UnsupportedOperationException();
}
return url;
}
public File file() {
if (file == null) {
throw new UnsupportedOperationException();
}
return file;
}
public String protocol() {
return file != null ? "file" : url.getProtocol();
}
public String path() {
if (file != null) {
return file.getPath();
}
try {
// Decode %20 and friends
return url.toURI().getSchemeSpecificPart();
} catch (URISyntaxException e) {
throw new IllegalArgumentException("Unable to convert URL " + url + " to URI", e);
}
}
public Reader reader() throws IOException {
final InputStream is;
if (path().endsWith(".gz")) {
final InputStream fis = openStream();
is = new GZIPInputStream(fis);
} else {
is = openStream();
}
return new InputStreamReader(is, StandardCharsets.UTF_8);
}
public InputStream openStream() throws IOException {
if (file != null) {
return new FileInputStream(file);
} else {
return url.openStream();
}
}
public Source trim(String suffix) {
Source x = trimOrNull(suffix);
return x == null ? this : x;
}
public Source trimOrNull(String suffix) {
if (url != null) {
final String s = Sources.trimOrNull(url.toExternalForm(), suffix);
return s == null ? null : Sources.url(s);
} else {
final String s = Sources.trimOrNull(file.getPath(), suffix);
return s == null ? null : of(new File(s));
}
}
public Source append(Source child) {
if (isFile(child)) {
if (child.file().isAbsolute()) {
return child;
}
} else {
try {
URI uri = child.url().toURI();
if (!uri.isOpaque()) {
// The URL is "absolute" (it starts with a slash)
return child;
}
} catch (URISyntaxException e) {
throw new IllegalArgumentException("Unable to convert URL " + child.url() + " to URI", e);
}
}
String path = child.path();
if (url != null) {
String encodedPath = new File(".").toURI().relativize(new File(path).toURI())
.getRawSchemeSpecificPart();
return Sources.url(url + "/" + encodedPath);
} else {
return Sources.file(file, path);
}
}
public Source relative(Source parent) {
if (isFile(parent)) {
if (isFile(this)
&& file.getPath().startsWith(parent.file().getPath())) {
String rest = file.getPath().substring(parent.file().getPath().length());
if (rest.startsWith(File.separator)) {
return Sources.file(null, rest.substring(File.separator.length()));
}
}
return this;
} else {
if (!isFile(this)) {
String rest = Sources.trimOrNull(url.toExternalForm(),
parent.url().toExternalForm());
if (rest != null
&& rest.startsWith("/")) {
return Sources.file(null, rest.substring(1));
}
}
return this;
}
}
}
}
// End Sources.java
|
arina-ielchiieva/calcite
|
core/src/main/java/org/apache/calcite/util/Sources.java
|
Java
|
apache-2.0
| 6,730 | 28.134199 | 100 | 0.606389 | false |
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package precis
import (
"golang.org/x/text/cases"
"golang.org/x/text/language"
"golang.org/x/text/runes"
"golang.org/x/text/transform"
"golang.org/x/text/unicode/norm"
"golang.org/x/text/width"
)
// An Option is used to define the behavior and rules of a Profile.
type Option func(*options)
type options struct {
// Preparation options
foldWidth bool
// Enforcement options
cases transform.Transformer
disallow runes.Set
norm norm.Form
additional []func() transform.Transformer
width *width.Transformer
disallowEmpty bool
bidiRule bool
// Comparison options
ignorecase bool
}
func getOpts(o ...Option) (res options) {
for _, f := range o {
f(&res)
}
return
}
var (
// The IgnoreCase option causes the profile to perform a case insensitive
// comparison during the PRECIS comparison step.
IgnoreCase Option = ignoreCase
// The FoldWidth option causes the profile to map non-canonical wide and
// narrow variants to their decomposition mapping. This is useful for
// profiles that are based on the identifier class which would otherwise
// disallow such characters.
FoldWidth Option = foldWidth
// The DisallowEmpty option causes the enforcement step to return an error if
// the resulting string would be empty.
DisallowEmpty Option = disallowEmpty
// The BidiRule option causes the Bidi Rule defined in RFC 5893 to be
// applied.
BidiRule Option = bidiRule
)
var (
ignoreCase = func(o *options) {
o.ignorecase = true
}
foldWidth = func(o *options) {
o.foldWidth = true
}
disallowEmpty = func(o *options) {
o.disallowEmpty = true
}
bidiRule = func(o *options) {
o.bidiRule = true
}
)
// The AdditionalMapping option defines the additional mapping rule for the
// Profile by applying Transformer's in sequence.
func AdditionalMapping(t ...func() transform.Transformer) Option {
return func(o *options) {
o.additional = t
}
}
// The Norm option defines a Profile's normalization rule. Defaults to NFC.
func Norm(f norm.Form) Option {
return func(o *options) {
o.norm = f
}
}
// The FoldCase option defines a Profile's case mapping rule. Options can be
// provided to determine the type of case folding used.
func FoldCase(opts ...cases.Option) Option {
return func(o *options) {
o.cases = cases.Fold(opts...)
}
}
// The LowerCase option defines a Profile's case mapping rule. Options can be
// provided to determine the type of case folding used.
func LowerCase(opts ...cases.Option) Option {
return func(o *options) {
if len(opts) == 0 {
o.cases = cases.Lower(language.Und, cases.HandleFinalSigma(false))
return
}
opts = append([]cases.Option{cases.HandleFinalSigma(false)}, opts...)
o.cases = cases.Lower(language.Und, opts...)
}
}
// The Disallow option further restricts a Profile's allowed characters beyond
// what is disallowed by the underlying string class.
func Disallow(set runes.Set) Option {
return func(o *options) {
o.disallow = set
}
}
|
fabzo/pget
|
vendor/golang.org/x/text/secure/precis/options.go
|
GO
|
apache-2.0
| 3,149 | 25.024793 | 78 | 0.718006 | false |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.restart;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.cluster.metadata.DataStreamTestHelper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.test.StreamsUtils;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.upgrades.AbstractFullClusterRestartTestCase;
import org.elasticsearch.xcontent.DeprecationHandler;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.ObjectPath;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.core.slm.SnapshotLifecyclePolicy;
import org.elasticsearch.xpack.core.slm.SnapshotLifecycleStats;
import org.hamcrest.Matcher;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import static org.elasticsearch.core.TimeValue.timeValueSeconds;
import static org.elasticsearch.upgrades.FullClusterRestartIT.assertNumHits;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
public static final int UPGRADE_FIELD_EXPECTED_INDEX_FORMAT_VERSION = 6;
public static final int SECURITY_EXPECTED_INDEX_FORMAT_VERSION = 6;
@Override
protected Settings restClientSettings() {
String token = "Basic " + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8));
return Settings.builder()
.put(ThreadContext.PREFIX + ".Authorization", token)
// we increase the timeout here to 90 seconds to handle long waits for a green
// cluster health. the waits for green need to be longer than a minute to
// account for delayed shards
.put(ESRestTestCase.CLIENT_SOCKET_TIMEOUT, "90s")
.build();
}
/**
* Tests that a single document survives. Super basic smoke test.
*/
public void testSingleDoc() throws IOException {
String docLocation = "/testsingledoc/_doc/1";
String doc = "{\"test\": \"test\"}";
if (isRunningAgainstOldCluster()) {
Request createDoc = new Request("PUT", docLocation);
createDoc.addParameter("refresh", "true");
createDoc.setJsonEntity(doc);
client().performRequest(createDoc);
}
Request getRequest = new Request("GET", docLocation);
assertThat(toStr(client().performRequest(getRequest)), containsString(doc));
}
public void testSecurityNativeRealm() throws Exception {
if (isRunningAgainstOldCluster()) {
createUser(true);
createRole(true);
} else {
waitForYellow(".security");
final Request getSettingsRequest = new Request("GET", "/.security/_settings/index.format");
getSettingsRequest.setOptions(
expectWarnings(
"this request accesses system indices: [.security-7], but in a future major "
+ "version, direct access to system indices will be prevented by default"
)
);
Response settingsResponse = client().performRequest(getSettingsRequest);
Map<String, Object> settingsResponseMap = entityAsMap(settingsResponse);
logger.info("settings response map {}", settingsResponseMap);
final String concreteSecurityIndex;
if (settingsResponseMap.isEmpty()) {
fail("The security index does not have the expected setting [index.format]");
} else {
concreteSecurityIndex = settingsResponseMap.keySet().iterator().next();
Map<?, ?> indexSettingsMap = (Map<?, ?>) settingsResponseMap.get(concreteSecurityIndex);
Map<?, ?> settingsMap = (Map<?, ?>) indexSettingsMap.get("settings");
logger.info("settings map {}", settingsMap);
if (settingsMap.containsKey("index")) {
int format = Integer.parseInt(String.valueOf(((Map<?, ?>) settingsMap.get("index")).get("format")));
assertEquals("The security index needs to be upgraded", SECURITY_EXPECTED_INDEX_FORMAT_VERSION, format);
}
}
// create additional user and role
createUser(false);
createRole(false);
}
assertUserInfo(isRunningAgainstOldCluster());
assertRoleInfo(isRunningAgainstOldCluster());
}
public void testWatcher() throws Exception {
if (isRunningAgainstOldCluster()) {
logger.info("Adding a watch on old cluster {}", getOldClusterVersion());
Request createBwcWatch = new Request("PUT", "/_watcher/watch/bwc_watch");
createBwcWatch.setJsonEntity(loadWatch("simple-watch.json"));
client().performRequest(createBwcWatch);
logger.info("Adding a watch with \"fun\" throttle periods on old cluster");
Request createBwcThrottlePeriod = new Request("PUT", "/_watcher/watch/bwc_throttle_period");
createBwcThrottlePeriod.setJsonEntity(loadWatch("throttle-period-watch.json"));
client().performRequest(createBwcThrottlePeriod);
logger.info("Adding a watch with \"fun\" read timeout on old cluster");
Request createFunnyTimeout = new Request("PUT", "/_watcher/watch/bwc_funny_timeout");
createFunnyTimeout.setJsonEntity(loadWatch("funny-timeout-watch.json"));
client().performRequest(createFunnyTimeout);
logger.info("Waiting for watch results index to fill up...");
try {
waitForYellow(".watches,bwc_watch_index,.watcher-history*");
} catch (ResponseException e) {
{
String rsp = toStr(client().performRequest(new Request("GET", "/_cluster/state")));
logger.info("cluster_state_response=\n{}", rsp);
}
{
Request request = new Request("GET", "/_watcher/stats/_all");
request.addParameter("emit_stacktraces", "true");
String rsp = toStr(client().performRequest(request));
logger.info("watcher_stats_response=\n{}", rsp);
}
throw e;
}
waitForHits("bwc_watch_index", 2);
waitForHits(".watcher-history*", 2);
logger.info("Done creating watcher-related indices");
} else {
logger.info("testing against {}", getOldClusterVersion());
try {
waitForYellow(".watches,bwc_watch_index,.watcher-history*");
} catch (ResponseException e) {
String rsp = toStr(client().performRequest(new Request("GET", "/_cluster/state")));
logger.info("cluster_state_response=\n{}", rsp);
throw e;
}
logger.info("checking that the Watches index is the correct version");
// Verify .watches index format:
var getClusterStateResponse = entityAsMap(client().performRequest(new Request("GET", "/_cluster/state/metadata/.watches")));
Map<?, ?> indices = ObjectPath.eval("metadata.indices", getClusterStateResponse);
var dotWatchesIndex = indices.get(".watches"); // ObjectPath.eval(...) doesn't handle keys containing .
var indexFormat = Integer.parseInt(ObjectPath.eval("settings.index.format", dotWatchesIndex));
assertEquals("The watches index needs to be upgraded", UPGRADE_FIELD_EXPECTED_INDEX_FORMAT_VERSION, indexFormat);
// Wait for watcher to actually start....
startWatcher();
try {
assertOldTemplatesAreDeleted();
assertWatchIndexContentsWork();
assertBasicWatchInteractions();
} finally {
/* Shut down watcher after every test because watcher can be a bit finicky about shutting down when the node shuts
* down. This makes super sure it shuts down *and* causes the test to fail in a sensible spot if it doesn't shut down.
*/
stopWatcher();
}
}
}
@SuppressWarnings("unchecked")
public void testWatcherWithApiKey() throws Exception {
final Request getWatchStatusRequest = new Request("GET", "/_watcher/watch/watch_with_api_key");
if (isRunningAgainstOldCluster()) {
final Request createApiKeyRequest = new Request("PUT", "/_security/api_key");
createApiKeyRequest.setJsonEntity(
"{\"name\":\"key-1\",\"role_descriptors\":"
+ "{\"r\":{\"cluster\":[\"all\"],\"indices\":[{\"names\":[\"*\"],\"privileges\":[\"all\"]}]}}}"
);
final Response response = client().performRequest(createApiKeyRequest);
final Map<String, Object> createApiKeyResponse = entityAsMap(response);
Request createWatchWithApiKeyRequest = new Request("PUT", "/_watcher/watch/watch_with_api_key");
createWatchWithApiKeyRequest.setJsonEntity(loadWatch("logging-watch.json"));
final byte[] keyBytes = (createApiKeyResponse.get("id") + ":" + createApiKeyResponse.get("api_key")).getBytes(
StandardCharsets.UTF_8
);
final String authHeader = "ApiKey " + Base64.getEncoder().encodeToString(keyBytes);
createWatchWithApiKeyRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", authHeader));
client().performRequest(createWatchWithApiKeyRequest);
assertBusy(() -> {
final Map<String, Object> getWatchStatusResponse = entityAsMap(client().performRequest(getWatchStatusRequest));
final Map<String, Object> status = (Map<String, Object>) getWatchStatusResponse.get("status");
assertEquals("executed", status.get("execution_state"));
});
} else {
logger.info("testing against {}", getOldClusterVersion());
try {
waitForYellow(".watches,.watcher-history*");
} catch (ResponseException e) {
String rsp = toStr(client().performRequest(new Request("GET", "/_cluster/state")));
logger.info("cluster_state_response=\n{}", rsp);
throw e;
}
// Wait for watcher to actually start....
startWatcher();
try {
final Map<String, Object> getWatchStatusResponse = entityAsMap(client().performRequest(getWatchStatusRequest));
final Map<String, Object> status = (Map<String, Object>) getWatchStatusResponse.get("status");
final int version = (int) status.get("version");
final AtomicBoolean versionIncreased = new AtomicBoolean();
final AtomicBoolean executed = new AtomicBoolean();
assertBusy(() -> {
final Map<String, Object> newGetWatchStatusResponse = entityAsMap(client().performRequest(getWatchStatusRequest));
final Map<String, Object> newStatus = (Map<String, Object>) newGetWatchStatusResponse.get("status");
if (false == versionIncreased.get() && version < (int) newStatus.get("version")) {
versionIncreased.set(true);
}
if (false == executed.get() && "executed".equals(newStatus.get("execution_state"))) {
executed.set(true);
}
assertThat(
"version increased: [" + versionIncreased.get() + "], executed: [" + executed.get() + "]",
versionIncreased.get() && executed.get(),
is(true)
);
});
} finally {
stopWatcher();
}
}
}
public void testServiceAccountApiKey() throws IOException {
assumeTrue("no service accounts in versions before " + Version.V_7_13_0, getOldClusterVersion().onOrAfter(Version.V_7_13_0));
if (isRunningAgainstOldCluster()) {
final Request createServiceTokenRequest = new Request("POST", "/_security/service/elastic/fleet-server/credential/token");
final Response createServiceTokenResponse = client().performRequest(createServiceTokenRequest);
assertOK(createServiceTokenResponse);
@SuppressWarnings("unchecked")
final String serviceToken = ((Map<String, String>) responseAsMap(createServiceTokenResponse).get("token")).get("value");
final Request createApiKeyRequest = new Request("PUT", "/_security/api_key");
createApiKeyRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + serviceToken));
createApiKeyRequest.setJsonEntity("{\"name\":\"key-1\"}");
final Response createApiKeyResponse = client().performRequest(createApiKeyRequest);
final Map<String, Object> createApiKeyResponseMap = entityAsMap(createApiKeyResponse);
final String authHeader = "ApiKey "
+ Base64.getEncoder()
.encodeToString(
(createApiKeyResponseMap.get("id") + ":" + createApiKeyResponseMap.get("api_key")).getBytes(StandardCharsets.UTF_8)
);
final Request indexRequest = new Request("PUT", "/api_keys/_doc/key-1");
indexRequest.setJsonEntity("{\"auth_header\":\"" + authHeader + "\"}");
assertOK(client().performRequest(indexRequest));
} else {
final Request getRequest = new Request("GET", "/api_keys/_doc/key-1");
final Response getResponse = client().performRequest(getRequest);
assertOK(getResponse);
final Map<String, Object> getResponseMap = responseAsMap(getResponse);
@SuppressWarnings("unchecked")
final String authHeader = ((Map<String, String>) getResponseMap.get("_source")).get("auth_header");
final Request mainRequest = new Request("GET", "/");
mainRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", authHeader));
assertOK(client().performRequest(mainRequest));
final Request getUserRequest = new Request("GET", "/_security/user");
getUserRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", authHeader));
final ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(getUserRequest));
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(403));
assertThat(e.getMessage(), containsString("is unauthorized"));
}
}
/**
* Tests that a RollUp job created on a old cluster is correctly restarted after the upgrade.
*/
public void testRollupAfterRestart() throws Exception {
if (isRunningAgainstOldCluster()) {
final int numDocs = 59;
final int year = randomIntBetween(1970, 2018);
// index documents for the rollup job
final StringBuilder bulk = new StringBuilder();
for (int i = 0; i < numDocs; i++) {
bulk.append("{\"index\":{\"_index\":\"rollup-docs\"}}\n");
String date = String.format(Locale.ROOT, "%04d-01-01T00:%02d:00Z", year, i);
bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n");
}
bulk.append("\r\n");
final Request bulkRequest = new Request("POST", "/_bulk");
bulkRequest.setJsonEntity(bulk.toString());
client().performRequest(bulkRequest);
// create the rollup job
final Request createRollupJobRequest = new Request("PUT", "/_rollup/job/rollup-job-test");
String intervalType;
if (getOldClusterVersion().onOrAfter(Version.V_7_2_0)) {
intervalType = "fixed_interval";
} else {
intervalType = "interval";
}
createRollupJobRequest.setJsonEntity(
"{"
+ "\"index_pattern\":\"rollup-*\","
+ "\"rollup_index\":\"results-rollup\","
+ "\"cron\":\"*/30 * * * * ?\","
+ "\"page_size\":100,"
+ "\"groups\":{"
+ " \"date_histogram\":{"
+ " \"field\":\"timestamp\","
+ " \""
+ intervalType
+ "\":\"5m\""
+ " }"
+ "},"
+ "\"metrics\":["
+ " {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}"
+ "]"
+ "}"
);
Map<String, Object> createRollupJobResponse = entityAsMap(client().performRequest(createRollupJobRequest));
assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE));
// start the rollup job
final Request startRollupJobRequest = new Request("POST", "/_rollup/job/rollup-job-test/_start");
Map<String, Object> startRollupJobResponse = entityAsMap(client().performRequest(startRollupJobRequest));
assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE));
assertRollUpJob("rollup-job-test");
} else {
final Request clusterHealthRequest = new Request("GET", "/_cluster/health");
clusterHealthRequest.addParameter("wait_for_status", "yellow");
clusterHealthRequest.addParameter("wait_for_no_relocating_shards", "true");
clusterHealthRequest.addParameter("wait_for_no_initializing_shards", "true");
Map<String, Object> clusterHealthResponse = entityAsMap(client().performRequest(clusterHealthRequest));
assertThat(clusterHealthResponse.get("timed_out"), equalTo(Boolean.FALSE));
assertRollUpJob("rollup-job-test");
}
}
public void testTransformLegacyTemplateCleanup() throws Exception {
assumeTrue("Before 7.2 transforms didn't exist", getOldClusterVersion().onOrAfter(Version.V_7_2_0));
if (isRunningAgainstOldCluster()) {
// create the source index
final Request createIndexRequest = new Request("PUT", "customers");
createIndexRequest.setJsonEntity(
"{"
+ "\"mappings\": {"
+ " \"properties\": {"
+ " \"customer_id\": { \"type\": \"keyword\" },"
+ " \"price\": { \"type\": \"double\" }"
+ " }"
+ "}"
+ "}"
);
Map<String, Object> createIndexResponse = entityAsMap(client().performRequest(createIndexRequest));
assertThat(createIndexResponse.get("acknowledged"), equalTo(Boolean.TRUE));
// create a transform
String endpoint = getOldClusterVersion().onOrAfter(Version.V_7_5_0)
? "_transform/transform-full-cluster-restart-test"
: "_data_frame/transforms/transform-full-cluster-restart-test";
final Request createTransformRequest = new Request("PUT", endpoint);
createTransformRequest.setJsonEntity(
"{"
+ "\"source\":{"
+ " \"index\":\"customers\""
+ "},"
+ "\"description\":\"testing\","
+ "\"dest\":{"
+ " \"index\":\"max_price\""
+ "},"
+ "\"pivot\": {"
+ " \"group_by\":{"
+ " \"customer_id\":{"
+ " \"terms\":{"
+ " \"field\":\"customer_id\""
+ " }"
+ " }"
+ " },"
+ " \"aggregations\":{"
+ " \"max_price\":{"
+ " \"max\":{"
+ " \"field\":\"price\""
+ " }"
+ " }"
+ " }"
+ "}"
+ "}"
);
Map<String, Object> createTransformResponse = entityAsMap(client().performRequest(createTransformRequest));
assertThat(createTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE));
} else {
// legacy index templates created in previous releases should not be present anymore
assertBusy(() -> {
Request request = new Request("GET", "/_template/.transform-*,.data-frame-*");
try {
Response response = client().performRequest(request);
Map<String, Object> responseLevel = entityAsMap(response);
assertNotNull(responseLevel);
assertThat(responseLevel.keySet(), empty());
} catch (ResponseException e) {
// not found is fine
assertThat(
"Unexpected failure getting templates: " + e.getResponse().getStatusLine(),
e.getResponse().getStatusLine().getStatusCode(),
is(404)
);
}
});
}
}
public void testSlmPolicyAndStats() throws IOException {
SnapshotLifecyclePolicy slmPolicy = new SnapshotLifecyclePolicy(
"test-policy",
"test-policy",
"* * * 31 FEB ? *",
"test-repo",
Collections.singletonMap("indices", Collections.singletonList("*")),
null
);
if (isRunningAgainstOldCluster() && getOldClusterVersion().onOrAfter(Version.V_7_4_0)) {
Request createRepoRequest = new Request("PUT", "_snapshot/test-repo");
String repoCreateJson = "{" + " \"type\": \"fs\"," + " \"settings\": {" + " \"location\": \"test-repo\"" + " }" + "}";
createRepoRequest.setJsonEntity(repoCreateJson);
Request createSlmPolicyRequest = new Request("PUT", "_slm/policy/test-policy");
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
String createSlmPolicyJson = Strings.toString(slmPolicy.toXContent(builder, null));
createSlmPolicyRequest.setJsonEntity(createSlmPolicyJson);
}
client().performRequest(createRepoRequest);
client().performRequest(createSlmPolicyRequest);
}
if (isRunningAgainstOldCluster() == false && getOldClusterVersion().onOrAfter(Version.V_7_4_0)) {
Request getSlmPolicyRequest = new Request("GET", "_slm/policy/test-policy");
Response response = client().performRequest(getSlmPolicyRequest);
Map<String, Object> responseMap = entityAsMap(response);
Map<?, ?> policy = (Map<?, ?>) ((Map<?, ?>) responseMap.get("test-policy")).get("policy");
assertEquals(slmPolicy.getName(), policy.get("name"));
assertEquals(slmPolicy.getRepository(), policy.get("repository"));
assertEquals(slmPolicy.getSchedule(), policy.get("schedule"));
assertEquals(slmPolicy.getConfig(), policy.get("config"));
}
if (isRunningAgainstOldCluster() == false) {
Response response = client().performRequest(new Request("GET", "_slm/stats"));
XContentType xContentType = XContentType.fromMediaType(response.getEntity().getContentType().getValue());
try (
XContentParser parser = xContentType.xContent()
.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
response.getEntity().getContent()
)
) {
assertEquals(new SnapshotLifecycleStats(), SnapshotLifecycleStats.parse(parser));
}
}
}
private String loadWatch(String watch) throws IOException {
return StreamsUtils.copyToStringFromClasspath("/org/elasticsearch/xpack/restart/" + watch);
}
private void assertOldTemplatesAreDeleted() throws IOException {
Map<String, Object> templates = entityAsMap(client().performRequest(new Request("GET", "/_template")));
assertThat(templates.keySet(), not(hasItems(is("watches"), startsWith("watch-history"), is("triggered_watches"))));
}
private void assertWatchIndexContentsWork() throws Exception {
// Fetch a basic watch
Request getRequest = new Request("GET", "_watcher/watch/bwc_watch");
Map<String, Object> bwcWatch = entityAsMap(client().performRequest(getRequest));
logger.error("-----> {}", bwcWatch);
assertThat(bwcWatch.get("found"), equalTo(true));
Map<?, ?> source = (Map<?, ?>) bwcWatch.get("watch");
assertEquals(1000, source.get("throttle_period_in_millis"));
int timeout = (int) timeValueSeconds(100).millis();
assertThat(ObjectPath.eval("input.search.timeout_in_millis", source), equalTo(timeout));
assertThat(ObjectPath.eval("actions.index_payload.transform.search.timeout_in_millis", source), equalTo(timeout));
assertThat(ObjectPath.eval("actions.index_payload.index.index", source), equalTo("bwc_watch_index"));
assertThat(ObjectPath.eval("actions.index_payload.index.timeout_in_millis", source), equalTo(timeout));
// Fetch a watch with "fun" throttle periods
getRequest = new Request("GET", "_watcher/watch/bwc_throttle_period");
bwcWatch = entityAsMap(client().performRequest(getRequest));
assertThat(bwcWatch.get("found"), equalTo(true));
source = (Map<?, ?>) bwcWatch.get("watch");
assertEquals(timeout, source.get("throttle_period_in_millis"));
assertThat(ObjectPath.eval("actions.index_payload.throttle_period_in_millis", source), equalTo(timeout));
/*
* Fetch a watch with a funny timeout to verify loading fractional time
* values.
*/
bwcWatch = entityAsMap(client().performRequest(new Request("GET", "_watcher/watch/bwc_funny_timeout")));
assertThat(bwcWatch.get("found"), equalTo(true));
source = (Map<?, ?>) bwcWatch.get("watch");
Map<String, Object> attachments = ObjectPath.eval("actions.work.email.attachments", source);
Map<?, ?> attachment = (Map<?, ?>) attachments.get("test_report.pdf");
Map<String, Object> request = ObjectPath.eval("http.request", attachment);
assertEquals(timeout, request.get("read_timeout_millis"));
assertEquals("https", request.get("scheme"));
assertEquals("example.com", request.get("host"));
assertEquals("{{ctx.metadata.report_url}}", request.get("path"));
assertEquals(8443, request.get("port"));
Map<String, String> basic = ObjectPath.eval("auth.basic", request);
assertThat(basic, hasEntry("username", "Aladdin"));
// password doesn't come back because it is hidden
assertThat(basic, hasEntry(is("password"), anyOf(startsWith("::es_encrypted::"), is("::es_redacted::"))));
Request searchRequest = new Request("GET", ".watcher-history*/_search");
if (isRunningAgainstOldCluster() == false) {
searchRequest.addParameter(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, "true");
}
Map<String, Object> history = entityAsMap(client().performRequest(searchRequest));
Map<?, ?> hits = (Map<?, ?>) history.get("hits");
assertThat((Integer) hits.get("total"), greaterThanOrEqualTo(2));
}
private void assertBasicWatchInteractions() throws Exception {
String watch = "{\"trigger\":{\"schedule\":{\"interval\":\"1s\"}},\"input\":{\"none\":{}},"
+ "\"condition\":{\"always\":{}},"
+ "\"actions\":{\"awesome\":{\"logging\":{\"level\":\"info\",\"text\":\"test\"}}}}";
Request createWatchRequest = new Request("PUT", "_watcher/watch/new_watch");
createWatchRequest.setJsonEntity(watch);
Map<String, Object> createWatch = entityAsMap(client().performRequest(createWatchRequest));
logger.info("create watch {}", createWatch);
assertThat(createWatch.get("created"), equalTo(true));
assertThat(createWatch.get("_version"), equalTo(1));
Map<String, Object> updateWatch = entityAsMap(client().performRequest(createWatchRequest));
assertThat(updateWatch.get("created"), equalTo(false));
assertThat(updateWatch.get("_version"), equalTo(2));
Map<String, Object> get = entityAsMap(client().performRequest(new Request("GET", "_watcher/watch/new_watch")));
assertThat(get.get("found"), equalTo(true));
Map<?, ?> source = (Map<?, ?>) get.get("watch");
Map<String, Object> logging = ObjectPath.eval("actions.awesome.logging", source);
assertEquals("info", logging.get("level"));
assertEquals("test", logging.get("text"));
}
private void waitForYellow(String indexName) throws IOException {
Request request = new Request("GET", "/_cluster/health/" + indexName);
request.addParameter("wait_for_status", "yellow");
request.addParameter("timeout", "30s");
request.addParameter("wait_for_no_relocating_shards", "true");
request.addParameter("wait_for_no_initializing_shards", "true");
Map<String, Object> response = entityAsMap(client().performRequest(request));
assertThat(response.get("timed_out"), equalTo(Boolean.FALSE));
}
private void waitForHits(String indexName, int expectedHits) throws Exception {
Request request = new Request("GET", "/" + indexName + "/_search");
request.addParameter("ignore_unavailable", "true");
request.addParameter("size", "0");
assertBusy(() -> {
try {
Map<String, Object> response = entityAsMap(client().performRequest(request));
Map<?, ?> hits = (Map<?, ?>) response.get("hits");
logger.info("Hits are: {}", hits);
Integer total;
if (getOldClusterVersion().onOrAfter(Version.V_7_0_0) || isRunningAgainstOldCluster() == false) {
total = (Integer) ((Map<?, ?>) hits.get("total")).get("value");
} else {
total = (Integer) hits.get("total");
}
assertThat(total, greaterThanOrEqualTo(expectedHits));
} catch (IOException ioe) {
if (ioe instanceof ResponseException) {
Response response = ((ResponseException) ioe).getResponse();
if (RestStatus.fromCode(response.getStatusLine().getStatusCode()) == RestStatus.SERVICE_UNAVAILABLE) {
fail("shards are not yet active");
}
}
throw ioe;
}
}, 30, TimeUnit.SECONDS);
}
private void startWatcher() throws Exception {
Map<String, Object> startWatchResponse = entityAsMap(client().performRequest(new Request("POST", "_watcher/_start")));
assertThat(startWatchResponse.get("acknowledged"), equalTo(Boolean.TRUE));
assertBusy(() -> {
Map<String, Object> statsWatchResponse = entityAsMap(client().performRequest(new Request("GET", "_watcher/stats")));
List<?> states = ((List<?>) statsWatchResponse.get("stats")).stream()
.map(o -> ((Map<?, ?>) o).get("watcher_state"))
.collect(Collectors.toList());
assertThat(states, everyItem(is("started")));
});
}
private void stopWatcher() throws Exception {
Map<String, Object> stopWatchResponse = entityAsMap(client().performRequest(new Request("POST", "_watcher/_stop")));
assertThat(stopWatchResponse.get("acknowledged"), equalTo(Boolean.TRUE));
assertBusy(() -> {
Map<String, Object> statsStoppedWatchResponse = entityAsMap(client().performRequest(new Request("GET", "_watcher/stats")));
List<?> states = ((List<?>) statsStoppedWatchResponse.get("stats")).stream()
.map(o -> ((Map<?, ?>) o).get("watcher_state"))
.collect(Collectors.toList());
assertThat(states, everyItem(is("stopped")));
});
}
static String toStr(Response response) throws IOException {
return EntityUtils.toString(response.getEntity());
}
private void createUser(final boolean oldCluster) throws Exception {
final String id = oldCluster ? "preupgrade_user" : "postupgrade_user";
Request request = new Request("PUT", "/_security/user/" + id);
request.setJsonEntity(
"{\n"
+ " \"password\" : \"l0ng-r4nd0m-p@ssw0rd\",\n"
+ " \"roles\" : [ \"admin\", \"other_role1\" ],\n"
+ " \"full_name\" : \""
+ randomAlphaOfLength(5)
+ "\",\n"
+ " \"email\" : \""
+ id
+ "@example.com\",\n"
+ " \"enabled\": true\n"
+ "}"
);
client().performRequest(request);
}
private void createRole(final boolean oldCluster) throws Exception {
final String id = oldCluster ? "preupgrade_role" : "postupgrade_role";
Request request = new Request("PUT", "/_security/role/" + id);
request.setJsonEntity(
"{\n"
+ " \"run_as\": [ \"abc\" ],\n"
+ " \"cluster\": [ \"monitor\" ],\n"
+ " \"indices\": [\n"
+ " {\n"
+ " \"names\": [ \"events-*\" ],\n"
+ " \"privileges\": [ \"read\" ],\n"
+ " \"field_security\" : {\n"
+ " \"grant\" : [ \"category\", \"@timestamp\", \"message\" ]\n"
+ " },\n"
+ " \"query\": \"{\\\"match\\\": {\\\"category\\\": \\\"click\\\"}}\"\n"
+ " }\n"
+ " ]\n"
+ "}"
);
client().performRequest(request);
}
private void assertUserInfo(final boolean oldCluster) throws Exception {
final String user = oldCluster ? "preupgrade_user" : "postupgrade_user";
Request request = new Request("GET", "/_security/user/" + user);
Map<String, Object> response = entityAsMap(client().performRequest(request));
Map<?, ?> userInfo = (Map<?, ?>) response.get(user);
assertEquals(user + "@example.com", userInfo.get("email"));
assertNotNull(userInfo.get("full_name"));
assertNotNull(userInfo.get("roles"));
}
private void assertRoleInfo(final boolean oldCluster) throws Exception {
final String role = oldCluster ? "preupgrade_role" : "postupgrade_role";
Map<?, ?> response = (Map<?, ?>) entityAsMap(client().performRequest(new Request("GET", "/_security/role/" + role))).get(role);
assertNotNull(response.get("run_as"));
assertNotNull(response.get("cluster"));
assertNotNull(response.get("indices"));
}
private void assertRollUpJob(final String rollupJob) throws Exception {
final Matcher<?> expectedStates = anyOf(equalTo("indexing"), equalTo("started"));
waitForRollUpJob(rollupJob, expectedStates);
// check that the rollup job is started using the RollUp API
final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob);
Map<String, Object> getRollupJobResponse = entityAsMap(client().performRequest(getRollupJobRequest));
Map<?, ?> job = getJob(getRollupJobResponse, rollupJob);
assertNotNull(job);
assertThat(ObjectPath.eval("status.job_state", job), expectedStates);
// check that the rollup job is started using the Tasks API
final Request taskRequest = new Request("GET", "_tasks");
taskRequest.addParameter("detailed", "true");
taskRequest.addParameter("actions", "xpack/rollup/*");
Map<String, Object> taskResponse = entityAsMap(client().performRequest(taskRequest));
Map<?, ?> taskResponseNodes = (Map<?, ?>) taskResponse.get("nodes");
Map<?, ?> taskResponseNode = (Map<?, ?>) taskResponseNodes.values().iterator().next();
Map<?, ?> taskResponseTasks = (Map<?, ?>) taskResponseNode.get("tasks");
Map<?, ?> taskResponseStatus = (Map<?, ?>) taskResponseTasks.values().iterator().next();
assertThat(ObjectPath.eval("status.job_state", taskResponseStatus), expectedStates);
// check that the rollup job is started using the Cluster State API
final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata");
Map<String, Object> clusterStateResponse = entityAsMap(client().performRequest(clusterStateRequest));
List<Map<String, Object>> rollupJobTasks = ObjectPath.eval("metadata.persistent_tasks.tasks", clusterStateResponse);
boolean hasRollupTask = false;
for (Map<String, Object> task : rollupJobTasks) {
if (ObjectPath.eval("id", task).equals(rollupJob)) {
hasRollupTask = true;
final String jobStateField = "task.xpack/rollup/job.state.job_state";
assertThat(
"Expected field [" + jobStateField + "] to be started or indexing in " + task.get("id"),
ObjectPath.eval(jobStateField, task),
expectedStates
);
break;
}
}
if (hasRollupTask == false) {
fail("Expected persistent task for [" + rollupJob + "] but none found.");
}
}
private void waitForRollUpJob(final String rollupJob, final Matcher<?> expectedStates) throws Exception {
assertBusy(() -> {
final Request getRollupJobRequest = new Request("GET", "/_rollup/job/" + rollupJob);
Response getRollupJobResponse = client().performRequest(getRollupJobRequest);
assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
Map<?, ?> job = getJob(getRollupJobResponse, rollupJob);
assertNotNull(job);
assertThat(ObjectPath.eval("status.job_state", job), expectedStates);
}, 30L, TimeUnit.SECONDS);
}
private Map<?, ?> getJob(Response response, String targetJobId) throws IOException {
return getJob(ESRestTestCase.entityAsMap(response), targetJobId);
}
private Map<?, ?> getJob(Map<String, Object> jobsMap, String targetJobId) throws IOException {
List<?> jobs = (List<?>) XContentMapValues.extractValue("jobs", jobsMap);
if (jobs == null) {
return null;
}
for (Object entry : jobs) {
Map<?, ?> job = (Map<?, ?>) entry;
String jobId = (String) ((Map<?, ?>) job.get("config")).get("id");
if (jobId.equals(targetJobId)) {
return job;
}
}
return null;
}
@SuppressWarnings("unchecked")
public void testDataStreams() throws Exception {
assumeTrue("no data streams in versions before " + Version.V_7_9_0, getOldClusterVersion().onOrAfter(Version.V_7_9_0));
if (isRunningAgainstOldCluster()) {
createComposableTemplate(client(), "dst", "ds");
Request indexRequest = new Request("POST", "/ds/_doc/1?op_type=create&refresh");
XContentBuilder builder = JsonXContent.contentBuilder()
.startObject()
.field("f", "v")
.field("@timestamp", System.currentTimeMillis())
.endObject();
indexRequest.setJsonEntity(Strings.toString(builder));
assertOK(client().performRequest(indexRequest));
}
        // It's quite possible that this test creates the data stream backing index on one day
        // and checks it on the next. To avoid that failing the test, we store the timestamp
        // used when the document is indexed, and later retrieve it to resolve the expected
        // backing index name.
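        // For illustration only (not from the original source): the resolved name encodes the
        // data stream name and generation and, on newer versions, a date derived from this
        // timestamp; DataStreamTestHelper accounts for that per-version difference below.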
Request getDoc = new Request("GET", "/ds/_search");
Map<String, Object> doc = entityAsMap(client().performRequest(getDoc));
logger.info("--> doc: {}", doc);
Map<String, Object> hits = (Map<String, Object>) doc.get("hits");
Map<String, Object> docBody = (Map<String, Object>) ((List<Object>) hits.get("hits")).get(0);
Long timestamp = (Long) ((Map<String, Object>) docBody.get("_source")).get("@timestamp");
logger.info("--> parsed out timestamp of {}", timestamp);
Request getDataStream = new Request("GET", "/_data_stream/ds");
Response response = client().performRequest(getDataStream);
assertOK(response);
List<Object> dataStreams = (List<Object>) entityAsMap(response).get("data_streams");
assertEquals(1, dataStreams.size());
Map<String, Object> ds = (Map<String, Object>) dataStreams.get(0);
List<Map<String, String>> indices = (List<Map<String, String>>) ds.get("indices");
assertEquals("ds", ds.get("name"));
assertEquals(1, indices.size());
assertEquals(
DataStreamTestHelper.getLegacyDefaultBackingIndexName("ds", 1, timestamp, getOldClusterVersion()),
indices.get(0).get("index_name")
);
assertNumHits("ds", 1, 1);
}
private static void createComposableTemplate(RestClient client, String templateName, String indexPattern) throws IOException {
StringEntity templateJSON = new StringEntity(
String.format(Locale.ROOT, "{\n" + " \"index_patterns\": \"%s\",\n" + " \"data_stream\": {}\n" + "}", indexPattern),
ContentType.APPLICATION_JSON
);
Request createIndexTemplateRequest = new Request("PUT", "_index_template/" + templateName);
createIndexTemplateRequest.setEntity(templateJSON);
client.performRequest(createIndexTemplateRequest);
}
}
|
jmluy/elasticsearch
|
x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java
|
Java
|
apache-2.0
| 44,401 | 50.153226 | 139 | 0.595369 | false |
String.prototype.format = function(args) {
var result = this;
if (arguments.length > 0) {
if (arguments.length == 1 && typeof(args) == "object") {
for (var key in args) {
if (args[key] != undefined) {
var reg = new RegExp("({" + key + "})", "g");
result = result.replace(reg, args[key]);
}
}
} else {
for (var i = 0; i < arguments.length; i++) {
if (arguments[i] != undefined) {
var reg = new RegExp("({)" + i + "(})", "g");
result = result.replace(reg, arguments[i]);
}
}
}
}
return result;
}
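// Illustrative usage of the format helper above (not part of the original file):
//   "Hello {0}, {1} new messages".format("Tom", 3)  -> "Hello Tom, 3 new messages"
//   "Hello {name}".format({name: "Tom"})            -> "Hello Tom"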
$(function() {
$('#btnSub').on('click', function() {
var
$txtUserName = $('#txtUserName'),
txtUserNameVal = $.trim($txtUserName.val()),
$txtUserPwd = $('#txtUserPwd'),
txtUserPwdVal = $.trim($txtUserPwd.val()),
errorTip = '<div id="errorTip" class="alert alert-warning">{0}</div> ';
$("#errorTip,#alt_warning").remove();
if (txtUserNameVal.length == 0) {
$("#container").prepend(errorTip.format('用户名不能为空'));
$txtUserName.focus();
return false;
}
if (txtUserPwdVal.length == 0) {
$("#container").prepend(errorTip.format('密码不能为空'));
$txtUserPwd.focus();
return false;
}
return true;
})
});
|
lnluckybamboo/fd_homework
|
task9/myexpress/public/js/login.js
|
JavaScript
|
apache-2.0
| 1,584 | 31.458333 | 84 | 0.435173 | false |
import os
from selenium.common.exceptions import WebDriverException
from keywordgroup import KeywordGroup
class _JavaScriptKeywords(KeywordGroup):
def __init__(self):
self._cancel_on_next_confirmation = False
# Public
def alert_should_be_present(self, text=''):
"""Verifies an alert is present and dismisses it.
If `text` is a non-empty string, then it is also verified that the
message of the alert equals to `text`.
        Will fail if no alert is present. Note that subsequent keywords
        will fail unless the alert is dismissed by this
        keyword or another like `Get Alert Message`.
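        Example (illustrative, not part of the original documentation):
        | Alert Should Be Present | Expected alert text |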
"""
alert_text = self.get_alert_message()
if text and alert_text != text:
raise AssertionError("Alert text should have been '%s' but was '%s'"
% (text, alert_text))
def choose_cancel_on_next_confirmation(self):
"""Cancel will be selected the next time `Confirm Action` is used."""
self._cancel_on_next_confirmation = True
def choose_ok_on_next_confirmation(self):
"""Undo the effect of using keywords `Choose Cancel On Next Confirmation`. Note
that Selenium's overridden window.confirm() function will normally automatically
return true, as if the user had manually clicked OK, so you shouldn't
need to use this command unless for some reason you need to change
your mind prior to the next confirmation. After any confirmation, Selenium will resume using the
default behavior for future confirmations, automatically returning
true (OK) unless/until you explicitly use `Choose Cancel On Next Confirmation` for each
confirmation.
Note that every time a confirmation comes up, you must
        consume it by using a keyword such as `Get Alert Message`, or else
        the following Selenium operations will fail.
"""
self._cancel_on_next_confirmation = False
def confirm_action(self):
"""Dismisses currently shown confirmation dialog and returns it's message.
By default, this keyword chooses 'OK' option from the dialog. If
'Cancel' needs to be chosen, keyword `Choose Cancel On Next
Confirmation` must be called before the action that causes the
confirmation dialog to be shown.
Examples:
| Click Button | Send | # Shows a confirmation dialog |
| ${message}= | Confirm Action | # Chooses Ok |
        | Should Be Equal | ${message} | Are you sure? |
| | | |
| Choose Cancel On Next Confirmation | | |
| Click Button | Send | # Shows a confirmation dialog |
| Confirm Action | | # Chooses Cancel |
"""
text = self._close_alert(not self._cancel_on_next_confirmation)
self._cancel_on_next_confirmation = False
return text
def execute_javascript(self, *code):
"""Executes the given JavaScript code.
`code` may contain multiple lines of code but must contain a
return statement (with the value to be returned) at the end.
`code` may be divided into multiple cells in the test data. In that
case, the parts are catenated together without adding spaces.
If `code` is an absolute path to an existing file, the JavaScript
to execute will be read from that file. Forward slashes work as
a path separator on all operating systems.
Note that, by default, the code will be executed in the context of the
Selenium object itself, so `this` will refer to the Selenium object.
Use `window` to refer to the window of your application, e.g.
`window.document.getElementById('foo')`.
Example:
| Execute JavaScript | window.my_js_function('arg1', 'arg2') |
| Execute JavaScript | ${CURDIR}/js_to_execute.js |
"""
js = self._get_javascript_to_execute(''.join(code))
self._info("Executing JavaScript:\n%s" % js)
return self._current_browser().execute_script(js)
def get_alert_message(self):
"""Returns the text of current JavaScript alert.
        This keyword will fail if no alert is present. Note that
        subsequent keywords will fail unless the alert is
        dismissed by this keyword or another like `Alert Should Be Present`.
"""
return self._close_alert()
# Private
def _close_alert(self, confirm=False):
alert = None
try:
alert = self._current_browser().switch_to_alert()
text = ' '.join(alert.text.splitlines()) # collapse new lines chars
            if not confirm:
                alert.dismiss()
            else:
                alert.accept()
return text
except WebDriverException:
raise RuntimeError('There were no alerts')
def _get_javascript_to_execute(self, code):
codepath = code.replace('/', os.sep)
if not (os.path.isabs(codepath) and os.path.isfile(codepath)):
return code
self._html('Reading JavaScript from file <a href="file://%s">%s</a>.'
% (codepath.replace(os.sep, '/'), codepath))
codefile = open(codepath)
try:
return codefile.read().strip()
finally:
codefile.close()
|
hali4ka/robotframework-selenium2library
|
src/Selenium2Library/keywords/_javascript.py
|
Python
|
apache-2.0
| 5,338 | 41.373016 | 104 | 0.631885 | false |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.util.state;
import static com.google.common.base.Preconditions.checkState;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.transforms.Combine.CombineFn;
import com.google.cloud.dataflow.sdk.transforms.Combine.KeyedCombineFn;
import com.google.cloud.dataflow.sdk.transforms.CombineWithContext.KeyedCombineFnWithContext;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.cloud.dataflow.sdk.transforms.windowing.OutputTimeFn;
import com.google.cloud.dataflow.sdk.util.CombineFnUtil;
import com.google.cloud.dataflow.sdk.util.state.InMemoryStateInternals.InMemoryState;
import com.google.cloud.dataflow.sdk.util.state.StateTag.StateBinder;
import com.google.common.base.Optional;
import com.google.common.collect.Iterables;
import org.joda.time.Instant;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import javax.annotation.Nullable;
/**
* {@link StateInternals} built on top of an underlying {@link StateTable} that contains instances
* of {@link InMemoryState}. Whenever state that exists in the underlying {@link StateTable} is
* accessed, an independent copy will be created within this table.
*/
public class CopyOnAccessInMemoryStateInternals<K> implements StateInternals<K> {
private final K key;
private final CopyOnAccessInMemoryStateTable<K> table;
/**
* Creates a new {@link CopyOnAccessInMemoryStateInternals} with the underlying (possibly null)
* StateInternals.
*/
public static <K> CopyOnAccessInMemoryStateInternals<K> withUnderlying(
K key, @Nullable CopyOnAccessInMemoryStateInternals<K> underlying) {
return new CopyOnAccessInMemoryStateInternals<K>(key, underlying);
}
private CopyOnAccessInMemoryStateInternals(
K key, CopyOnAccessInMemoryStateInternals<K> underlying) {
this.key = key;
table =
new CopyOnAccessInMemoryStateTable<K>(key, underlying == null ? null : underlying.table);
}
/**
* Ensures this {@link CopyOnAccessInMemoryStateInternals} is complete. Other copies of state for
* the same Step and Key may be discarded after invoking this method.
*
* <p>For each {@link StateNamespace}, for each {@link StateTag address} in that namespace that
* has not been bound in this {@link CopyOnAccessInMemoryStateInternals}, put a reference to that
* state within this {@link StateInternals}.
*
* <p>Additionally, stores the {@link WatermarkHoldState} with the earliest time bound in the
* state table after the commit is completed, enabling calls to
* {@link #getEarliestWatermarkHold()}.
*
* @return this table
*/
public CopyOnAccessInMemoryStateInternals<K> commit() {
table.commit();
return this;
}
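  // Illustrative sketch (not part of the original source; names are hypothetical):
  // a typical copy-on-access lifecycle across two instances sharing the same key.
  //
  //   CopyOnAccessInMemoryStateInternals<String> first =
  //       CopyOnAccessInMemoryStateInternals.withUnderlying("key", null);
  //   // ... bind and mutate state via first.state(someNamespace, someTag) ...
  //   first.commit();
  //   CopyOnAccessInMemoryStateInternals<String> second =
  //       CopyOnAccessInMemoryStateInternals.withUnderlying("key", first);
  //   // State read through "second" is copied from "first" on first access, so
  //   // later mutations made through "second" are not visible to "first".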
/**
* Gets the earliest Watermark Hold present in this table.
*
* <p>Must be called after this state has been committed. Will throw an
* {@link IllegalStateException} if the state has not been committed.
*/
public Instant getEarliestWatermarkHold() {
// After commit, the watermark hold is always present, but may be
// BoundedWindow#TIMESTAMP_MAX_VALUE if there is no hold set.
checkState(
table.earliestWatermarkHold.isPresent(),
"Can't get the earliest watermark hold in a %s before it is committed",
getClass().getSimpleName());
return table.earliestWatermarkHold.get();
}
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<? super K, T> address) {
return state(namespace, address, StateContexts.nullContext());
}
@Override
public <T extends State> T state(
StateNamespace namespace, StateTag<? super K, T> address, StateContext<?> c) {
return table.get(namespace, address, c);
}
@Override
public K getKey() {
return key;
}
public boolean isEmpty() {
return Iterables.isEmpty(table.values());
}
/**
* A {@link StateTable} that, when a value is retrieved with
* {@link StateTable#get(StateNamespace, StateTag)}, first attempts to obtain a copy of existing
* {@link State} from an underlying {@link StateTable}.
*/
private static class CopyOnAccessInMemoryStateTable<K> extends StateTable<K> {
private final K key;
private Optional<StateTable<K>> underlying;
/**
* The StateBinderFactory currently in use by this {@link CopyOnAccessInMemoryStateTable}.
*
* <p>There are three {@link StateBinderFactory} implementations used by the {@link
* CopyOnAccessInMemoryStateTable}.
* <ul>
* <li>The default {@link StateBinderFactory} is a {@link CopyOnBindBinderFactory}, allowing
* the table to copy any existing {@link State} values to this {@link StateTable} from the
* underlying table when accessed, at which point mutations will not be visible to the
* underlying table - effectively a "Copy by Value" binder.</li>
* <li>During the execution of the {@link #commit()} method, this is a
* {@link ReadThroughBinderFactory}, which copies the references to the existing
* {@link State} objects to this {@link StateTable}.</li>
* <li>After the execution of the {@link #commit()} method, this is an
* instance of {@link InMemoryStateBinderFactory}, which constructs new instances of state
* when a {@link StateTag} is bound.</li>
* </ul>
*/
private StateBinderFactory<K> binderFactory;
/**
* The earliest watermark hold in this table.
*/
private Optional<Instant> earliestWatermarkHold;
public CopyOnAccessInMemoryStateTable(K key, StateTable<K> underlying) {
this.key = key;
this.underlying = Optional.fromNullable(underlying);
binderFactory = new CopyOnBindBinderFactory<>(key, this.underlying);
earliestWatermarkHold = Optional.absent();
}
/**
* Copies all values in the underlying table to this table, then discards the underlying table.
*
* <p>If there is an underlying table, this replaces the existing
* {@link CopyOnBindBinderFactory} with a {@link ReadThroughBinderFactory}, then reads all of
* the values in the existing table, binding the state values to this table. The old StateTable
* should be discarded after the call to {@link #commit()}.
*
* <p>After copying all of the existing values, replace the binder factory with an instance of
* {@link InMemoryStateBinderFactory} to construct new values, since all existing values
* are bound in this {@link StateTable table} and this table represents the canonical state.
*/
private void commit() {
Instant earliestHold = getEarliestWatermarkHold();
if (underlying.isPresent()) {
ReadThroughBinderFactory<K> readThroughBinder =
new ReadThroughBinderFactory<>(underlying.get());
binderFactory = readThroughBinder;
Instant earliestUnderlyingHold = readThroughBinder.readThroughAndGetEarliestHold(this);
if (earliestUnderlyingHold.isBefore(earliestHold)) {
earliestHold = earliestUnderlyingHold;
}
}
earliestWatermarkHold = Optional.of(earliestHold);
clearEmpty();
binderFactory = new InMemoryStateBinderFactory<>(key);
underlying = Optional.absent();
}
/**
* Get the earliest watermark hold in this table. Ignores the contents of any underlying table.
*/
private Instant getEarliestWatermarkHold() {
Instant earliest = BoundedWindow.TIMESTAMP_MAX_VALUE;
for (State existingState : this.values()) {
if (existingState instanceof WatermarkHoldState) {
Instant hold = ((WatermarkHoldState<?>) existingState).read();
if (hold != null && hold.isBefore(earliest)) {
earliest = hold;
}
}
}
return earliest;
}
/**
* Clear all empty {@link StateNamespace StateNamespaces} from this table. If all states are
* empty, clear the entire table.
*
* <p>Because {@link InMemoryState} is not removed from the {@link StateTable} after it is
* cleared, in case contents are modified after being cleared, the table must be explicitly
* checked to ensure that it contains state and removed if not (otherwise we may never use
* the table again).
*/
private void clearEmpty() {
Collection<StateNamespace> emptyNamespaces = new HashSet<>(this.getNamespacesInUse());
for (StateNamespace namespace : this.getNamespacesInUse()) {
for (State existingState : this.getTagsInUse(namespace).values()) {
if (!((InMemoryState<?>) existingState).isCleared()) {
emptyNamespaces.remove(namespace);
break;
}
}
}
for (StateNamespace empty : emptyNamespaces) {
this.clearNamespace(empty);
}
}
@Override
protected StateBinder<K> binderForNamespace(final StateNamespace namespace, StateContext<?> c) {
return binderFactory.forNamespace(namespace, c);
}
private static interface StateBinderFactory<K> {
StateBinder<K> forNamespace(StateNamespace namespace, StateContext<?> c);
}
/**
* {@link StateBinderFactory} that creates a copy of any existing state when the state is bound.
*/
private static class CopyOnBindBinderFactory<K> implements StateBinderFactory<K> {
private final K key;
private final Optional<StateTable<K>> underlying;
public CopyOnBindBinderFactory(K key, Optional<StateTable<K>> underlying) {
this.key = key;
this.underlying = underlying;
}
private boolean containedInUnderlying(StateNamespace namespace, StateTag<? super K, ?> tag) {
return underlying.isPresent() && underlying.get().isNamespaceInUse(namespace)
&& underlying.get().getTagsInUse(namespace).containsKey(tag);
}
@Override
public StateBinder<K> forNamespace(final StateNamespace namespace, final StateContext<?> c) {
return new StateBinder<K>() {
@Override
public <W extends BoundedWindow> WatermarkHoldState<W> bindWatermark(
StateTag<? super K, WatermarkHoldState<W>> address,
OutputTimeFn<? super W> outputTimeFn) {
if (containedInUnderlying(namespace, address)) {
@SuppressWarnings("unchecked")
InMemoryState<? extends WatermarkHoldState<W>> existingState =
(InMemoryStateInternals.InMemoryState<? extends WatermarkHoldState<W>>)
underlying.get().get(namespace, address, c);
return existingState.copy();
} else {
return new InMemoryStateInternals.InMemoryWatermarkHold<>(
outputTimeFn);
}
}
@Override
public <T> ValueState<T> bindValue(
StateTag<? super K, ValueState<T>> address, Coder<T> coder) {
if (containedInUnderlying(namespace, address)) {
@SuppressWarnings("unchecked")
InMemoryState<? extends ValueState<T>> existingState =
(InMemoryStateInternals.InMemoryState<? extends ValueState<T>>)
underlying.get().get(namespace, address, c);
return existingState.copy();
} else {
return new InMemoryStateInternals.InMemoryValue<>();
}
}
@Override
public <InputT, AccumT, OutputT> AccumulatorCombiningState<InputT, AccumT, OutputT>
bindCombiningValue(
StateTag<? super K, AccumulatorCombiningState<InputT, AccumT, OutputT>> address,
Coder<AccumT> accumCoder, CombineFn<InputT, AccumT, OutputT> combineFn) {
if (containedInUnderlying(namespace, address)) {
@SuppressWarnings("unchecked")
InMemoryState<? extends AccumulatorCombiningState<InputT, AccumT, OutputT>>
existingState = (
InMemoryStateInternals
.InMemoryState<? extends AccumulatorCombiningState<InputT, AccumT,
OutputT>>) underlying.get().get(namespace, address, c);
return existingState.copy();
} else {
return new InMemoryStateInternals.InMemoryCombiningValue<>(
key, combineFn.asKeyedFn());
}
}
@Override
public <T> BagState<T> bindBag(
StateTag<? super K, BagState<T>> address, Coder<T> elemCoder) {
if (containedInUnderlying(namespace, address)) {
@SuppressWarnings("unchecked")
InMemoryState<? extends BagState<T>> existingState =
(InMemoryStateInternals.InMemoryState<? extends BagState<T>>)
underlying.get().get(namespace, address, c);
return existingState.copy();
} else {
return new InMemoryStateInternals.InMemoryBag<>();
}
}
@Override
public <InputT, AccumT, OutputT> AccumulatorCombiningState<InputT, AccumT, OutputT>
bindKeyedCombiningValue(
StateTag<? super K, AccumulatorCombiningState<InputT, AccumT, OutputT>> address,
Coder<AccumT> accumCoder,
KeyedCombineFn<? super K, InputT, AccumT, OutputT> combineFn) {
if (containedInUnderlying(namespace, address)) {
@SuppressWarnings("unchecked")
InMemoryState<? extends AccumulatorCombiningState<InputT, AccumT, OutputT>>
existingState = (
InMemoryStateInternals
.InMemoryState<? extends AccumulatorCombiningState<InputT, AccumT,
OutputT>>) underlying.get().get(namespace, address, c);
return existingState.copy();
} else {
return new InMemoryStateInternals.InMemoryCombiningValue<>(key, combineFn);
}
}
@Override
public <InputT, AccumT, OutputT> AccumulatorCombiningState<InputT, AccumT, OutputT>
bindKeyedCombiningValueWithContext(
StateTag<? super K, AccumulatorCombiningState<InputT, AccumT, OutputT>> address,
Coder<AccumT> accumCoder,
KeyedCombineFnWithContext<? super K, InputT, AccumT, OutputT> combineFn) {
return bindKeyedCombiningValue(
address, accumCoder, CombineFnUtil.bindContext(combineFn, c));
}
};
}
}
/**
* {@link StateBinderFactory} that reads directly from the underlying table. Used during calls
* to {@link CopyOnAccessInMemoryStateTable#commit()} to read all values from
* the underlying table.
*/
private static class ReadThroughBinderFactory<K> implements StateBinderFactory<K> {
private final StateTable<K> underlying;
public ReadThroughBinderFactory(StateTable<K> underlying) {
this.underlying = underlying;
}
public Instant readThroughAndGetEarliestHold(StateTable<K> readTo) {
Instant earliestHold = BoundedWindow.TIMESTAMP_MAX_VALUE;
for (StateNamespace namespace : underlying.getNamespacesInUse()) {
for (Map.Entry<StateTag<? super K, ?>, ? extends State> existingState :
underlying.getTagsInUse(namespace).entrySet()) {
if (!((InMemoryState<?>) existingState.getValue()).isCleared()) {
// Only read through non-cleared values to ensure that completed windows are
// eventually discarded, and remember the earliest watermark hold from among those
// values.
State state =
readTo.get(namespace, existingState.getKey(), StateContexts.nullContext());
if (state instanceof WatermarkHoldState) {
Instant hold = ((WatermarkHoldState<?>) state).read();
if (hold != null && hold.isBefore(earliestHold)) {
earliestHold = hold;
}
}
}
}
}
return earliestHold;
}
@Override
public StateBinder<K> forNamespace(final StateNamespace namespace, final StateContext<?> c) {
return new StateBinder<K>() {
@Override
public <W extends BoundedWindow> WatermarkHoldState<W> bindWatermark(
StateTag<? super K, WatermarkHoldState<W>> address,
OutputTimeFn<? super W> outputTimeFn) {
return underlying.get(namespace, address, c);
}
@Override
public <T> ValueState<T> bindValue(
StateTag<? super K, ValueState<T>> address, Coder<T> coder) {
return underlying.get(namespace, address, c);
}
@Override
public <InputT, AccumT, OutputT> AccumulatorCombiningState<InputT, AccumT, OutputT>
bindCombiningValue(
StateTag<? super K, AccumulatorCombiningState<InputT, AccumT, OutputT>> address,
Coder<AccumT> accumCoder, CombineFn<InputT, AccumT, OutputT> combineFn) {
return underlying.get(namespace, address, c);
}
@Override
public <T> BagState<T> bindBag(
StateTag<? super K, BagState<T>> address, Coder<T> elemCoder) {
return underlying.get(namespace, address, c);
}
@Override
public <InputT, AccumT, OutputT> AccumulatorCombiningState<InputT, AccumT, OutputT>
bindKeyedCombiningValue(
StateTag<? super K, AccumulatorCombiningState<InputT, AccumT, OutputT>> address,
Coder<AccumT> accumCoder,
KeyedCombineFn<? super K, InputT, AccumT, OutputT> combineFn) {
return underlying.get(namespace, address, c);
}
@Override
public <InputT, AccumT, OutputT> AccumulatorCombiningState<InputT, AccumT, OutputT>
bindKeyedCombiningValueWithContext(
StateTag<? super K, AccumulatorCombiningState<InputT, AccumT, OutputT>> address,
Coder<AccumT> accumCoder,
KeyedCombineFnWithContext<? super K, InputT, AccumT, OutputT> combineFn) {
return bindKeyedCombiningValue(
address, accumCoder, CombineFnUtil.bindContext(combineFn, c));
}
};
}
}
private static class InMemoryStateBinderFactory<K> implements StateBinderFactory<K> {
private final K key;
public InMemoryStateBinderFactory(K key) {
this.key = key;
}
@Override
public StateBinder<K> forNamespace(StateNamespace namespace, StateContext<?> c) {
return new InMemoryStateInternals.InMemoryStateBinder<>(key, c);
}
}
}
}
|
shakamunyi/beam
|
sdk/src/main/java/com/google/cloud/dataflow/sdk/util/state/CopyOnAccessInMemoryStateInternals.java
|
Java
|
apache-2.0
| 19,484 | 41.9163 | 100 | 0.653665 | false |
#!/bin/env python
# Import modules
import unittest
import Pyro4
from xdd.transport import FlowBuilderTransport
from xdd.transport import FlowBuilderTransportError
# Standard testing strategy: try to check each side of each branch statement
# for correctness
# Test constructor method of FlowBuilderTransport
class FlowBuilderTransportTestCase(unittest.TestCase):
"""Test FlowBuilderTransport constructor"""
def test_valid_host(self):
"""Test construction with a legal hostname"""
# Get the hostname
import platform
host = platform.node()
self.assertNotEqual(0, len(host))
try:
f = FlowBuilderTransport(host, host)
self.assertEqual(True, True)
except:
self.assertEqual(False, True)
def test_invalid_host(self):
"""Test construction with an invalid hostname"""
try:
f = FlowBuilderTransport('invalid-hostip-string', 'invalid-hostname-string')
self.assertEqual(True, False)
except FlowBuilderTransportError:
self.assertEqual(True, True)
except:
self.assertEqual(True, False)
def test_single_transport(self):
"""Create a single server on a host"""
# Get the hostname
import platform
host = platform.node()
self.assertNotEqual(0, len(host))
fbt1 = FlowBuilderTransport(host, host)
fbt1.shutdown()
fbt2 = FlowBuilderTransport(host, host)
fbt2.shutdown()
def test_multiple_transport(self):
"""Create multiple servers on the same host"""
# Get the hostname
import platform
host = platform.node()
self.assertNotEqual(0, len(host))
fbt1 = FlowBuilderTransport(host, host)
fbt2 = FlowBuilderTransport(host, host)
fbt2.shutdown()
fbt1.shutdown()
def test_multiple_transport_detailed(self):
"""Create multiple servers on the same host"""
# Get the hostname
import platform
host = platform.node()
self.assertNotEqual(0, len(host))
fbt1 = FlowBuilderTransport(host, host)
fbt2 = FlowBuilderTransport(host, host)
# Now test that a message can cross the transports
host1 = fbt1.getFlowBuilder().hostname()
host2 = fbt2.getFlowBuilder().hostname()
self.assertEqual(host1, host2)
# Shutdown cleanly
fbt2.shutdown()
fbt1.shutdown()
# Test getFlowBuilder method of FlowBuilderTransport
class FlowBuilderTransportGetFlowBuilderTestCase(unittest.TestCase):
"""Test getFlowBuilder method of FlowBuilderTransport"""
def test_single_flowbuilder(self):
"""Create a single server on a host"""
# Get the hostname
import platform
host = platform.node()
self.assertNotEqual(0, len(host))
fbt = FlowBuilderTransport(host, host)
fb = fbt.getFlowBuilder()
self.assertEqual(True, fb.isReady())
self.assertRaises(Pyro4.errors.ConnectionClosedError, fb.shutdown)
def test_multiple_flowbuilder(self):
"""Create multiple servers on the same host"""
# Get the hostname
import platform
host = platform.node()
self.assertNotEqual(0, len(host))
fbt1 = FlowBuilderTransport(host, host)
fb1 = fbt1.getFlowBuilder()
fbt2 = FlowBuilderTransport(host, host)
fb2 = fbt2.getFlowBuilder()
self.assertEqual(True, fb1.isReady())
self.assertEqual(True, fb2.isReady())
self.assertRaises(Pyro4.errors.ConnectionClosedError, fb1.shutdown)
self.assertRaises(Pyro4.errors.ConnectionClosedError, fb2.shutdown)
if __name__ == '__main__':
unittest.main()
|
eunsungc/gt6-RAMSES_8_5
|
xdd-7.0.0.rc-ramses3/tests/unit/site-packages/xdd_transport_test.py
|
Python
|
apache-2.0
| 3,788 | 30.04918 | 88 | 0.642819 | false |
$: << 'cf_spec'
require 'cf_spec_helper'
describe 'CF PHP Buildpack' do
let(:browser) { Machete::Browser.new(@app) }
before(:context) { @app_name = 'php_app'}
context 'deploying a basic PHP app' do
before(:all) do
@env_config = {env: {'COMPOSER_GITHUB_OAUTH_TOKEN' => ENV['COMPOSER_GITHUB_OAUTH_TOKEN']}}
@app = deploy_app(@app_name, @env_config)
end
after(:all) do
Machete::CF::DeleteApp.new.execute(@app)
end
it 'expects an app to be running' do
expect(@app).to be_running
end
it 'displays the buildpack version' do
expect(@app).to have_logged "-------> Buildpack version #{File.read(File.expand_path('../../../VERSION', __FILE__)).chomp}"
end
it 'installs a current version of PHP' do
expect(@app).to have_logged 'Installing PHP'
expect(@app).to have_logged 'PHP 5.6'
end
it 'does not return the version of PHP in the response headers' do
browser.visit_path('/')
expect(browser).to have_body 'PHP Version'
expect(browser).not_to have_header 'X-Powered-By'
end
it 'does not display a warning message about the php version config' do
expect(@app).to_not have_logged 'WARNING: A version of PHP has been specified in both `composer.json` and `./bp-config/options.json`.'
expect(@app).to_not have_logged 'WARNING: The version defined in `composer.json` will be used.'
end
end
context 'in offline mode', :cached do
before(:all) do
@env_config = {env: {'COMPOSER_GITHUB_OAUTH_TOKEN' => ENV['COMPOSER_GITHUB_OAUTH_TOKEN']}}
@app = deploy_app(@app_name, @env_config)
end
after(:all) do
Machete::CF::DeleteApp.new.execute(@app)
end
it 'does not call out to the internet' do
expect(@app).not_to have_internet_traffic
end
it 'downloads the binaries directly from the buildpack' do
expect(@app).to have_logged %r{Downloaded \[file://.*/dependencies/https___buildpacks.cloudfoundry.org_dependencies_php_php-5.6.\d+-linux-x64-[\da-f]+.tgz\] to \[/tmp\]}
end
end
context 'using default versions' do
before(:all) do
@env_config = {env: {'COMPOSER_GITHUB_OAUTH_TOKEN' => ENV['COMPOSER_GITHUB_OAUTH_TOKEN'], 'BP_DEBUG' => 1}}
@app = deploy_app(@app_name, @env_config)
end
after(:all) do
Machete::CF::DeleteApp.new.execute(@app)
end
it 'installs the default version of PHP' do
expect(@app).to have_logged '"update_default_version" is setting [PHP_VERSION]'
end
it 'installs the default version of composer' do
expect(@app).to have_logged 'DEBUG: default_version_for composer is'
end
end
end
|
Orange-OpenSource/cf-php-build-pack
|
cf_spec/integration/deploy_a_php_app_spec.rb
|
Ruby
|
apache-2.0
| 2,667 | 31.13253 | 175 | 0.643045 | false |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.unusedSymbol;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiModifier;
import com.intellij.ui.ClickListener;
import com.intellij.ui.JBColor;
import com.intellij.ui.UserActivityProviderComponent;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Consumer;
import com.intellij.util.VisibilityUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Set;
import java.util.function.Supplier;
public class VisibilityModifierChooser extends JLabel implements UserActivityProviderComponent {
@PsiModifier.ModifierConstant private static final String[] MODIFIERS =
new String[]{PsiModifier.PRIVATE, PsiModifier.PACKAGE_LOCAL, PsiModifier.PROTECTED, PsiModifier.PUBLIC};
private final Supplier<Boolean> myCanBeEnabled;
private final Set<ChangeListener> myListeners = new HashSet<>();
private String myCurrentModifier;
public VisibilityModifierChooser(@NotNull Supplier<Boolean> canBeEnabled,
@NotNull String modifier,
@NotNull Consumer<String> modifierChangedConsumer) {
this(canBeEnabled, modifier, modifierChangedConsumer, MODIFIERS);
}
public VisibilityModifierChooser(@NotNull Supplier<Boolean> canBeEnabled,
@NotNull String modifier,
@NotNull Consumer<String> modifierChangedConsumer,
@NotNull String[] modifiers) {
myCanBeEnabled = canBeEnabled;
setIcon(AllIcons.General.Combo2);
setDisabledIcon(AllIcons.General.Combo2);
setIconTextGap(0);
setHorizontalTextPosition(SwingConstants.LEFT);
myCurrentModifier = modifier;
setText(getPresentableText(myCurrentModifier));
new ClickListener() {
@Override
public boolean onClick(@NotNull MouseEvent e, int clickCount) {
if (!isEnabled()) return true;
@SuppressWarnings("UseOfObsoleteCollectionType")
Hashtable<Integer, JComponent> sliderLabels = new Hashtable<>();
for (int i = 0; i < modifiers.length; i++) {
sliderLabels.put(i + 1, new JLabel(getPresentableText(modifiers[i])));
}
JSlider slider = new JSlider(SwingConstants.VERTICAL, 1, modifiers.length, 1);
slider.addChangeListener(val -> {
final String modifier = modifiers[slider.getValue() - 1];
if (myCurrentModifier != modifier) {
myCurrentModifier = modifier;
modifierChangedConsumer.consume(modifier);
setText(getPresentableText(modifier));
fireStateChanged();
}
});
slider.setLabelTable(sliderLabels);
slider.putClientProperty(UIUtil.JSLIDER_ISFILLED, Boolean.TRUE);
slider.setPreferredSize(JBUI.size(150, modifiers.length * 25));
slider.setPaintLabels(true);
slider.setSnapToTicks(true);
slider.setValue(ArrayUtil.find(modifiers, myCurrentModifier) + 1);
final JBPopup popup = JBPopupFactory.getInstance()
.createComponentPopupBuilder(slider, null)
.setTitle("Effective Visibility")
.setCancelOnClickOutside(true)
.setMovable(true)
.createPopup();
popup.show(new RelativePoint(VisibilityModifierChooser.this, new Point(getWidth(), 0)));
return true;
}
}.installOn(this);
}
private void fireStateChanged() {
for (ChangeListener listener : myListeners) {
listener.stateChanged(new ChangeEvent(this));
}
}
private static String getPresentableText(String modifier) {
return StringUtil.capitalize(VisibilityUtil.toPresentableText(modifier));
}
@Override
public void setForeground(Color fg) {
super.setForeground(isEnabled() ? JBColor.link() : fg);
}
@Override
public void setEnabled(boolean enabled) {
super.setEnabled(enabled && myCanBeEnabled.get());
}
@Override
public void addChangeListener(ChangeListener changeListener) {
myListeners.add(changeListener);
}
@Override
public void removeChangeListener(ChangeListener changeListener) {
myListeners.remove(changeListener);
}
}
|
jk1/intellij-community
|
java/java-impl/src/com/intellij/codeInspection/unusedSymbol/VisibilityModifierChooser.java
|
Java
|
apache-2.0
| 4,767 | 37.443548 | 140 | 0.709251 | false |
package org.apache.maven.plugin.assembly.mojos;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.project.MavenProject;
/**
* Like the <code>assembly:attached</code> goal, assemble an application bundle or distribution from an assembly
* descriptor. This goal is suitable either for binding to the lifecycle or calling directly from the command line
* (provided all required files are available before the build starts, or are produced by another goal specified before
* this one on the command line). <br/>
*
* This goal differs from <code>assembly:single</code> in that it ignores the <formats/> section of the assembly
* descriptor, and forces the assembly to be created as a directory in the project's build-output directory (usually
* <code>./target</code>).
*
* @author <a href="mailto:[email protected]">John Casey</a>
* @author <a href="mailto:[email protected]">Gilles Scokart</a>
* @version $Id$
*
* @deprecated Use assembly:single and an assembly with format == dir instead! This mojo is redundant.
*/
@Mojo( name = "directory-single", inheritByDefault = false )
@Deprecated
public class DirectorySingleMojo
extends AbstractDirectoryMojo
{
/**
*/
@Component
private MavenProject project;
@Override
public MavenProject getProject()
{
return project;
}
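    // Illustrative note (not part of the original source): the recommended replacement is
    // the assembly:single goal with a descriptor whose only format is "dir", e.g.:
    //
    //   <assembly>
    //     <id>distribution</id>
    //     <formats>
    //       <format>dir</format>
    //     </formats>
    //     <!-- fileSets / dependencySets as needed -->
    //   </assembly>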
}
|
dmlloyd/maven-plugins
|
maven-assembly-plugin/src/main/java/org/apache/maven/plugin/assembly/mojos/DirectorySingleMojo.java
|
Java
|
apache-2.0
| 2,239 | 37.603448 | 119 | 0.742296 | false |
/*
* Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
*/
#include "bgp/bgp_multicast.h"
#include "base/label_block.h"
#include "base/logging.h"
#include "base/task.h"
#include "base/task_annotations.h"
#include "base/test/task_test_util.h"
#include "bgp/bgp_attr.h"
#include "bgp/ipeer.h"
#include "bgp/ermvpn/ermvpn_route.h"
#include "bgp/ermvpn/ermvpn_table.h"
#include "bgp/test/bgp_server_test_util.h"
#include "control-node/control_node.h"
#include "db/db.h"
#include "db/db_table.h"
#include "io/event_manager.h"
#include "testing/gunit.h"
using namespace std;
using namespace boost;
class XmppPeerMock : public IPeer {
public:
XmppPeerMock(BgpServer *server, string address_str)
: server_(server),
address_str_(address_str),
label_block_(new LabelBlock(1000, 1500 -1)) {
boost::system::error_code ec;
BgpAttrSpec attr_spec;
address_ = Ip4Address::from_string(address_str.c_str(), ec);
BgpAttrNextHop nexthop(address_.to_ulong());
attr_spec.push_back(&nexthop);
BgpAttrLabelBlock label_block(label_block_);
attr_spec.push_back(&label_block);
attr = server_->attr_db()->Locate(attr_spec);
}
virtual ~XmppPeerMock() { }
void AddRoute(ErmVpnTable *table, string group_str, string source_str) {
boost::system::error_code ec;
RouteDistinguisher rd(address_.to_ulong(), 65535);
Ip4Address group = Ip4Address::from_string(group_str.c_str(), ec);
Ip4Address source = Ip4Address::from_string(source_str.c_str(), ec);
DBRequest req;
req.oper = DBRequest::DB_ENTRY_ADD_CHANGE;
ErmVpnPrefix prefix(ErmVpnPrefix::NativeRoute, rd, group, source);
req.key.reset(new ErmVpnTable::RequestKey(prefix, this));
req.data.reset(new ErmVpnTable::RequestData(attr, 0, 0));
table->Enqueue(&req);
}
void AddRoute(ErmVpnTable *table, string group_str) {
AddRoute(table, group_str, "0.0.0.0");
}
void DelRoute(ErmVpnTable *table, string group_str, string source_str) {
boost::system::error_code ec;
RouteDistinguisher rd(address_.to_ulong(), 65535);
Ip4Address group = Ip4Address::from_string(group_str.c_str(), ec);
Ip4Address source = Ip4Address::from_string(source_str.c_str(), ec);
DBRequest req;
req.oper = DBRequest::DB_ENTRY_DELETE;
ErmVpnPrefix prefix(ErmVpnPrefix::NativeRoute, rd, group, source);
req.key.reset(new ErmVpnTable::RequestKey(prefix, this));
table->Enqueue(&req);
}
void DelRoute(ErmVpnTable *table, string group_str) {
DelRoute(table, group_str, "0.0.0.0");
}
virtual std::string ToString() const { return address_str_; }
virtual std::string ToUVEKey() const { return address_str_; }
virtual BgpServer *server() { return server_; }
virtual IPeerClose *peer_close() { return NULL; }
virtual IPeerDebugStats *peer_stats() { return NULL; }
virtual bool IsReady() const { return true; }
virtual bool IsXmppPeer() const { return true; }
virtual void Close() { }
virtual BgpProto::BgpPeerType PeerType() const { return BgpProto::IBGP; }
virtual uint32_t bgp_identifier() const { return address_.to_ulong(); }
virtual const std::string GetStateName() const { return ""; }
virtual bool SendUpdate(const uint8_t *msg, size_t msgsize) { return true; }
virtual void UpdateRefCount(int count) const { }
virtual tbb::atomic<int> GetRefCount() const {
tbb::atomic<int> count;
count = 0;
return count;
}
private:
BgpServer *server_;
string address_str_;
Ip4Address address_;
LabelBlockPtr label_block_;
BgpAttrPtr attr;
};
class BgpMulticastTest : public ::testing::Test {
protected:
static const int kPeerCount = 1 + McastTreeManager::kDegree +
McastTreeManager::kDegree * McastTreeManager::kDegree;
static const int kEvenPeerCount = (kPeerCount + 1) / 2;
static const int kOddPeerCount = kPeerCount / 2;
BgpMulticastTest() : server_(&evm_) { }
virtual void SetUp() {
ConcurrencyScope scope("bgp::Config");
master_cfg_.reset(BgpTestUtil::CreateBgpInstanceConfig(
BgpConfigManager::kMasterInstance, "", ""));
red_cfg_.reset(BgpTestUtil::CreateBgpInstanceConfig(
"red", "target:1.2.3.4:1", "target:1.2.3.4:1"));
green_cfg_.reset(BgpTestUtil::CreateBgpInstanceConfig(
"green", "target:1.2.3.4:2", "target:1.2.3.4:2"));
TaskScheduler *scheduler = TaskScheduler::GetInstance();
scheduler->Stop();
server_.routing_instance_mgr()->CreateRoutingInstance(red_cfg_.get());
server_.routing_instance_mgr()->CreateRoutingInstance(green_cfg_.get());
scheduler->Start();
task_util::WaitForIdle();
red_table_ = static_cast<ErmVpnTable *>(
server_.database()->FindTable("red.ermvpn.0"));
red_tm_ = red_table_->tree_manager_;
TASK_UTIL_EXPECT_EQ(red_table_, red_tm_->table_);
TASK_UTIL_EXPECT_EQ(DB::PartitionCount(),
(int)red_tm_->partitions_.size());
TASK_UTIL_EXPECT_NE(-1, red_tm_->listener_id_);
green_table_ = static_cast<ErmVpnTable *>(
server_.database()->FindTable("green.ermvpn.0"));
green_tm_ = green_table_->tree_manager_;
TASK_UTIL_EXPECT_EQ(green_table_, green_tm_->table_);
TASK_UTIL_EXPECT_EQ(DB::PartitionCount(),
(int)green_tm_->partitions_.size());
TASK_UTIL_EXPECT_NE(-1, green_tm_->listener_id_);
CreatePeers();
}
virtual void TearDown() {
task_util::WaitForIdle();
server_.Shutdown();
task_util::WaitForIdle();
STLDeleteValues(&peers_);
}
void CreatePeers() {
for (int idx = 0; idx < kPeerCount; idx++) {
std::ostringstream repr;
repr << "10.1.1." << (idx+1);
XmppPeerMock *peer = new XmppPeerMock(&server_, repr.str());
peers_.push_back(peer);
}
}
void AddRoutePeers(ErmVpnTable *table,
string group_str, string source_str, bool even, bool odd) {
for (vector<XmppPeerMock *>::iterator it = peers_.begin();
it != peers_.end(); ++it) {
if ((((it - peers_.begin()) % 2 == 0) == even) ||
(((it - peers_.begin()) % 2 == 1) == odd)) {
(*it)->AddRoute(table, group_str, source_str);
}
}
}
void AddRouteAllPeers(ErmVpnTable *table,
string group_str, string source_str) {
AddRoutePeers(table, group_str, source_str, true, true);
}
void AddRouteAllPeers(ErmVpnTable *table, string group_str) {
AddRouteAllPeers(table, group_str, "0.0.0.0");
}
void AddRouteEvenPeers(ErmVpnTable *table,
string group_str, string source_str) {
AddRoutePeers(table, group_str, source_str, true, false);
}
void AddRouteEvenPeers(ErmVpnTable *table, string group_str) {
AddRouteEvenPeers(table, group_str, "0.0.0.0");
}
void AddRouteOddPeers(ErmVpnTable *table,
string group_str, string source_str) {
AddRoutePeers(table, group_str, source_str, false, true);
}
void AddRouteOddPeers(ErmVpnTable *table, string group_str) {
AddRouteOddPeers(table, group_str, "0.0.0.0");
}
void DelRoutePeers(ErmVpnTable *table,
string group_str, string source_str, bool even, bool odd) {
for (vector<XmppPeerMock *>::iterator it = peers_.begin();
it != peers_.end(); ++it) {
if ((((it - peers_.begin()) % 2 == 0) == even) ||
(((it - peers_.begin()) % 2 == 1) == odd)) {
(*it)->DelRoute(table, group_str, source_str);
}
}
}
void DelRouteAllPeers(ErmVpnTable *table,
string group_str, string source_str) {
DelRoutePeers(table, group_str, source_str, true, true);
}
void DelRouteAllPeers(ErmVpnTable *table, string group_str) {
DelRouteAllPeers(table, group_str, "0.0.0.0");
}
void DelRouteEvenPeers(ErmVpnTable *table,
string group_str, string source_str) {
DelRoutePeers(table, group_str, source_str, true, false);
}
void DelRouteEvenPeers(ErmVpnTable *table, string group_str) {
DelRouteEvenPeers(table, group_str, "0.0.0.0");
}
void DelRouteOddPeers(ErmVpnTable *table,
string group_str, string source_str) {
DelRoutePeers(table, group_str, source_str, false, true);
}
void DelRouteOddPeers(ErmVpnTable *table, string group_str) {
DelRouteOddPeers(table, group_str, "0.0.0.0");
}
void VerifyRouteCount(ErmVpnTable *table, size_t count) {
TASK_UTIL_EXPECT_EQ(count, table->Size());
}
void VerifySGCount(McastTreeManager *tm, size_t count) {
size_t total = 0;
for (McastTreeManager::PartitionList::iterator it =
tm->partitions_.begin();
it != tm->partitions_.end(); ++it) {
total += (*it)->sg_list_.size();
}
TASK_UTIL_EXPECT_EQ(count, total);
}
void VerifyForwarderProperties(ErmVpnTable *table,
McastForwarder *forwarder) {
ConcurrencyScope scope("db::DBTable");
EXPECT_GE(forwarder->label(), 1000);
EXPECT_LE(forwarder->label(), 1499);
EXPECT_GE(forwarder->tree_links_.size(), 1);
EXPECT_LE(forwarder->tree_links_.size(), McastTreeManager::kDegree + 1);
TASK_UTIL_EXPECT_TRUE(forwarder->route() != NULL);
TASK_UTIL_EXPECT_EQ(1, forwarder->route()->count());
boost::scoped_ptr<UpdateInfo> uinfo(forwarder->GetUpdateInfo(table));
TASK_UTIL_EXPECT_TRUE(uinfo.get() != NULL);
BgpOList *olist = uinfo->roattr.attr()->olist().get();
TASK_UTIL_EXPECT_TRUE(olist != NULL);
EXPECT_GE(olist->elements.size(), 1);
EXPECT_LE(olist->elements.size(), McastTreeManager::kDegree + 1);
}
void VerifyOnlyForwarderProperties(ErmVpnTable *table,
McastForwarder *forwarder) {
ConcurrencyScope scope("db::DBTable");
TASK_UTIL_EXPECT_NE(0, forwarder->label());
TASK_UTIL_EXPECT_EQ(0, forwarder->tree_links_.size());
TASK_UTIL_EXPECT_TRUE(forwarder->route() != NULL);
TASK_UTIL_EXPECT_EQ(1, forwarder->route()->count());
boost::scoped_ptr<UpdateInfo> uinfo(forwarder->GetUpdateInfo(table));
TASK_UTIL_EXPECT_TRUE(uinfo.get() == NULL);
}
void VerifyForwarderLinks(McastForwarder *forwarder) {
BGP_DEBUG_UT(" McastForwarder " << forwarder->ToString());
for (McastForwarderList::iterator it = forwarder->tree_links_.begin();
it != forwarder->tree_links_.end(); ++it) {
BGP_DEBUG_UT(" Link to " << (*it)->ToString());
}
}
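    // Finds the (group, source) entry across all partitions and verifies the
    // native forwarder count; with multiple forwarders each one's label, links
    // and olist are checked, otherwise the standalone-forwarder checks apply.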
void VerifyForwarderCount(McastTreeManager *tm,
string group_str, string source_str, size_t count) {
boost::system::error_code ec;
Ip4Address group = Ip4Address::from_string(group_str.c_str(), ec);
Ip4Address source = Ip4Address::from_string(source_str.c_str(), ec);
BGP_DEBUG_UT("Table " << tm->table_->name());
for (McastTreeManager::PartitionList::iterator it =
tm->partitions_.begin(); it != tm->partitions_.end(); ++it) {
McastSGEntry *sg_entry = (*it)->FindSGEntry(group, source);
if (sg_entry) {
McastSGEntry::ForwarderSet *forwarders =
sg_entry->forwarder_sets_[McastTreeManager::LevelNative];
TASK_UTIL_EXPECT_EQ(count, forwarders->size());
if (forwarders->size() > 1) {
BGP_DEBUG_UT(" McastSGEntry " << sg_entry->ToString() <<
" partition " << (*it)->part_id_);
}
for (McastSGEntry::ForwarderSet::iterator it =
forwarders->begin(); it != forwarders->end(); ++it) {
if (forwarders->size() > 1) {
VerifyForwarderProperties(tm->table_, *it);
VerifyForwarderLinks(*it);
} else {
VerifyOnlyForwarderProperties(tm->table_, *it);
}
}
return;
}
}
TASK_UTIL_EXPECT_EQ(0, count);
}
void VerifyForwarderCount(McastTreeManager *tm,
string group_str, size_t count) {
VerifyForwarderCount(tm, group_str, "0.0.0.0", count);
}
size_t VerifyTreeUpdateCount(McastTreeManager *tm) {
size_t total = 0;
for (int idx = 0; idx < DB::PartitionCount(); idx++) {
total += tm->partitions_[idx]->update_count_;
}
return total;
}
EventManager evm_;
BgpServer server_;
ErmVpnTable *red_table_;
ErmVpnTable *green_table_;
McastTreeManager *red_tm_;
McastTreeManager *green_tm_;
scoped_ptr<BgpInstanceConfig> master_cfg_;
scoped_ptr<BgpInstanceConfig> red_cfg_;
scoped_ptr<BgpInstanceConfig> green_cfg_;
std::vector<XmppPeerMock *> peers_;
};
TEST_F(BgpMulticastTest, Noop) {
}
TEST_F(BgpMulticastTest, Basic) {
peers_[0]->AddRoute(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 2);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", 1);
peers_[1]->AddRoute(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 3);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", 2);
peers_[0]->DelRoute(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 2);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", 1);
peers_[1]->DelRoute(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 0);
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, SingleGroup) {
AddRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
DelRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 0);
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, SingleGroupAddDel) {
AddRouteAllPeers(red_table_, "192.168.1.255");
DelRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 0);
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, SingleGroupDuplicateAdd) {
AddRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
AddRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
DelRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 0);
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, SingleGroupIncrementalAdd) {
AddRouteEvenPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kEvenPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kEvenPeerCount);
AddRouteOddPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
DelRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 0);
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, SingleGroupIncrementalDel) {
AddRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
DelRouteEvenPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kOddPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kOddPeerCount);
DelRouteOddPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 0);
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, SingleGroupRepeatedDelAdd) {
AddRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
for (int idx = 0; idx < 5; idx++) {
DelRouteEvenPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kOddPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kOddPeerCount);
AddRouteEvenPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
DelRouteOddPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kEvenPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kEvenPeerCount);
AddRouteOddPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
}
DelRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 0);
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, MultipleGroup) {
AddRouteAllPeers(red_table_, "192.168.1.253");
AddRouteAllPeers(red_table_, "192.168.1.254");
AddRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 3);
VerifyForwarderCount(red_tm_, "192.168.1.253", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.254", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
DelRouteAllPeers(red_table_, "192.168.1.253");
DelRouteAllPeers(red_table_, "192.168.1.254");
DelRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.253", 0);
VerifyForwarderCount(red_tm_, "192.168.1.254", 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, MultipleGroupDuplicateAdd) {
AddRouteAllPeers(red_table_, "192.168.1.253");
AddRouteAllPeers(red_table_, "192.168.1.254");
AddRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 3);
VerifyForwarderCount(red_tm_, "192.168.1.253", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.254", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
AddRouteAllPeers(red_table_, "192.168.1.253");
AddRouteAllPeers(red_table_, "192.168.1.254");
AddRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 3);
VerifyForwarderCount(red_tm_, "192.168.1.253", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.254", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
DelRouteAllPeers(red_table_, "192.168.1.253");
DelRouteAllPeers(red_table_, "192.168.1.254");
DelRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.253", 0);
VerifyForwarderCount(red_tm_, "192.168.1.254", 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, MultipleGroupIncrementalAdd) {
AddRouteOddPeers(red_table_, "192.168.1.253");
AddRouteOddPeers(red_table_, "192.168.1.254");
AddRouteOddPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 3);
VerifyForwarderCount(red_tm_, "192.168.1.253", kOddPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.254", kOddPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.255", kOddPeerCount);
AddRouteEvenPeers(red_table_, "192.168.1.253");
AddRouteEvenPeers(red_table_, "192.168.1.254");
AddRouteEvenPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 3);
VerifyForwarderCount(red_tm_, "192.168.1.253", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.254", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
DelRouteAllPeers(red_table_, "192.168.1.253");
DelRouteAllPeers(red_table_, "192.168.1.254");
DelRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.253", 0);
VerifyForwarderCount(red_tm_, "192.168.1.254", 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, MultipleGroupIncrementalDel) {
AddRouteAllPeers(red_table_, "192.168.1.253");
AddRouteAllPeers(red_table_, "192.168.1.254");
AddRouteAllPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 3);
VerifyForwarderCount(red_tm_, "192.168.1.253", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.254", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
DelRouteOddPeers(red_table_, "192.168.1.253");
DelRouteOddPeers(red_table_, "192.168.1.254");
DelRouteOddPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 3);
VerifyForwarderCount(red_tm_, "192.168.1.253", kEvenPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.254", kEvenPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.255", kEvenPeerCount);
DelRouteEvenPeers(red_table_, "192.168.1.253");
DelRouteEvenPeers(red_table_, "192.168.1.254");
DelRouteEvenPeers(red_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifySGCount(red_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.253", 0);
VerifyForwarderCount(red_tm_, "192.168.1.254", 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, MultipleTableSingleGroup) {
AddRouteAllPeers(red_table_, "192.168.1.255");
AddRouteAllPeers(green_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, kPeerCount + 1);
VerifyRouteCount(green_table_, kPeerCount + 1);
VerifySGCount(red_tm_, 1);
VerifySGCount(green_tm_, 1);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
VerifyForwarderCount(green_tm_, "192.168.1.255", kPeerCount);
DelRouteAllPeers(red_table_, "192.168.1.255");
DelRouteAllPeers(green_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 0);
VerifyRouteCount(green_table_, 0);
VerifySGCount(red_tm_, 0);
VerifySGCount(green_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
VerifyForwarderCount(green_tm_, "192.168.1.255", 0);
}
TEST_F(BgpMulticastTest, MultipleTableMultipleGroup) {
AddRouteAllPeers(red_table_, "192.168.1.254");
AddRouteAllPeers(red_table_, "192.168.1.255");
AddRouteAllPeers(green_table_, "192.168.1.254");
AddRouteAllPeers(green_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 2 * (kPeerCount + 1));
VerifyRouteCount(green_table_, 2 * (kPeerCount + 1));
VerifySGCount(red_tm_, 2);
VerifySGCount(green_tm_, 2);
VerifyForwarderCount(red_tm_, "192.168.1.254", kPeerCount);
VerifyForwarderCount(red_tm_, "192.168.1.255", kPeerCount);
VerifyForwarderCount(green_tm_, "192.168.1.254", kPeerCount);
VerifyForwarderCount(green_tm_, "192.168.1.255", kPeerCount);
DelRouteAllPeers(red_table_, "192.168.1.254");
DelRouteAllPeers(red_table_, "192.168.1.255");
DelRouteAllPeers(green_table_, "192.168.1.254");
DelRouteAllPeers(green_table_, "192.168.1.255");
task_util::WaitForIdle();
VerifyRouteCount(red_table_, 0);
VerifyRouteCount(green_table_, 0);
VerifySGCount(red_tm_, 0);
VerifySGCount(green_tm_, 0);
VerifyForwarderCount(red_tm_, "192.168.1.254", 0);
VerifyForwarderCount(red_tm_, "192.168.1.255", 0);
VerifyForwarderCount(green_tm_, "192.168.1.254", 0);
VerifyForwarderCount(green_tm_, "192.168.1.255", 0);
}
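// The scheduler is stopped while routes are added or deleted so each batch is
// processed in a single pass; the small expected counts check that tree
// updates are compressed rather than emitted once per route.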
TEST_F(BgpMulticastTest, TreeUpdateCompression) {
TaskScheduler *scheduler = TaskScheduler::GetInstance();
scheduler->Stop();
AddRouteAllPeers(red_table_, "192.168.1.255");
scheduler->Start();
task_util::WaitForIdle();
TASK_UTIL_EXPECT_EQ(2, VerifyTreeUpdateCount(red_tm_));
scheduler->Stop();
DelRouteOddPeers(red_table_, "192.168.1.255");
scheduler->Start();
task_util::WaitForIdle();
TASK_UTIL_EXPECT_EQ(4, VerifyTreeUpdateCount(red_tm_));
scheduler->Stop();
DelRouteEvenPeers(red_table_, "192.168.1.255");
scheduler->Start();
task_util::WaitForIdle();
TASK_UTIL_EXPECT_EQ(6, VerifyTreeUpdateCount(red_tm_));
}
int main(int argc, char **argv) {
bgp_log_test::init();
::testing::InitGoogleTest(&argc, argv);
ControlNode::SetDefaultSchedulingPolicy();
int result = RUN_ALL_TESTS();
TaskScheduler::GetInstance()->Terminate();
return result;
}
|
Juniper/contrail-dev-controller
|
src/bgp/test/bgp_multicast_test.cc
|
C++
|
apache-2.0
| 26,426 | 37.023022 | 80 | 0.632143 | false |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
using System;
using System.CodeDom;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Org.Apache.REEF.IMRU.OnREEF;
using Org.Apache.REEF.IMRU.OnREEF.MapInputWithControlMessage;
using Org.Apache.REEF.Network.Group.Config;
using Org.Apache.REEF.Network.Group.Pipelining;
using Org.Apache.REEF.Tang.Annotations;
using Org.Apache.REEF.Tang.Implementations.Tang;
using Org.Apache.REEF.Tang.Util;
using Org.Apache.REEF.Wake.Remote;
using Org.Apache.REEF.Wake.Remote.Impl;
using Org.Apache.REEF.Wake.StreamingCodec;
using Org.Apache.REEF.Wake.StreamingCodec.CommonStreamingCodecs;
namespace Org.Apache.REEF.IMRU.Tests
{
[TestClass]
public class MapInputWithControlMessageTests
{
/// <summary>
/// Tests the codec for TMapInputWithControlMessage
/// </summary>
[TestMethod]
public void TestMapInputWithControlMessageCodec()
{
float[] baseMessage = {0, 1};
var config = TangFactory.GetTang().NewConfigurationBuilder()
.BindImplementation(GenericType<IStreamingCodec<float[]>>.Class,
GenericType<FloatArrayStreamingCodec>.Class)
.Build();
IStreamingCodec<MapInputWithControlMessage<float[]>> codec =
TangFactory.GetTang().NewInjector(config).GetInstance<MapInputWithControlMessageCodec<float[]>>();
MemoryStream stream = new MemoryStream();
IDataWriter writer = new StreamDataWriter(stream);
codec.Write(new MapInputWithControlMessage<float[]>(baseMessage, MapControlMessage.AnotherRound), writer);
codec.Write(new MapInputWithControlMessage<float[]>(MapControlMessage.Stop), writer);
stream.Position = 0;
IDataReader reader = new StreamDataReader(stream);
var message1 = codec.Read(reader);
var message2 = codec.Read(reader);
Assert.AreEqual(message1.Message[0], baseMessage[0]);
Assert.AreEqual(message1.Message[1], baseMessage[1]);
Assert.IsNull(message2.Message);
Assert.AreEqual(message1.ControlMessage, MapControlMessage.AnotherRound);
Assert.AreEqual(message2.ControlMessage, MapControlMessage.Stop);
}
/// <summary>
/// Tests the pipelining Data converter for TMapInputWithControlMessage
/// </summary>
[TestMethod]
public void TestMapInputPipelining()
{
int chunkSize = 2;
var config = TangFactory.GetTang().NewConfigurationBuilder(
PipelineDataConverterConfiguration<int[]>.Conf
.Set(PipelineDataConverterConfiguration<int[]>.DataConverter,
GenericType<PipelineIntDataConverter>.Class)
.Build()).BindNamedParameter<ChunkSize, int>(
GenericType<ChunkSize>.Class,
chunkSize.ToString(CultureInfo.InvariantCulture)).Build();
IPipelineDataConverter<MapInputWithControlMessage<int[]>> dataConverter =
TangFactory.GetTang()
.NewInjector(config)
.GetInstance<MapInputwithControlMessagePipelineDataConverter<int[]>>();
int[] baseMessage = {1, 2, 3};
var chunks1 = dataConverter.PipelineMessage(new MapInputWithControlMessage<int[]>(baseMessage,
MapControlMessage.AnotherRound));
var chunks2 = dataConverter.PipelineMessage(new MapInputWithControlMessage<int[]>(MapControlMessage.Stop));
Assert.AreEqual(chunks1.Count, 2);
Assert.IsTrue(chunks1[0].Data.Message.Length == 2);
Assert.IsTrue(chunks1[1].Data.Message.Length == 1);
Assert.AreEqual(chunks1[0].Data.Message[0], baseMessage[0]);
Assert.AreEqual(chunks1[0].Data.Message[1], baseMessage[1]);
Assert.AreEqual(chunks1[1].Data.Message[0], baseMessage[2]);
Assert.AreEqual(chunks1[0].Data.ControlMessage, MapControlMessage.AnotherRound);
Assert.AreEqual(chunks1[1].Data.ControlMessage, MapControlMessage.AnotherRound);
Assert.AreEqual(chunks1[0].IsLast, false);
Assert.AreEqual(chunks1[1].IsLast, true);
Assert.AreEqual(chunks2.Count, 1);
Assert.IsNull(chunks2[0].Data.Message);
Assert.AreEqual(chunks2[0].Data.ControlMessage, MapControlMessage.Stop);
Assert.AreEqual(chunks2[0].IsLast, true);
var fullMessage1 = dataConverter.FullMessage(chunks1);
var fullMessage2 = dataConverter.FullMessage(chunks2);
Assert.AreEqual(fullMessage1.Message[0], baseMessage[0]);
Assert.AreEqual(fullMessage1.Message[1], baseMessage[1]);
Assert.AreEqual(fullMessage1.Message[2], baseMessage[2]);
Assert.AreEqual(fullMessage1.ControlMessage, chunks1[0].Data.ControlMessage);
Assert.IsNull(fullMessage2.Message);
Assert.AreEqual(fullMessage2.ControlMessage, chunks2[0].Data.ControlMessage);
}
[NamedParameter("Chunk size.")]
private sealed class ChunkSize : Name<int>
{
}
private class PipelineIntDataConverter : IPipelineDataConverter<int[]>
{
private readonly int _chunkSize;
[Inject]
private PipelineIntDataConverter([Parameter(typeof(ChunkSize))] int chunkSize)
{
_chunkSize = chunkSize;
}
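            /// <summary>
            /// Splits the message into chunks of at most _chunkSize ints and
            /// marks only the final chunk as the last pipeline message.
            /// </summary>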
public List<PipelineMessage<int[]>> PipelineMessage(int[] message)
{
var messageList = new List<PipelineMessage<int[]>>();
var totalChunks = message.Length / _chunkSize;
if (message.Length % _chunkSize != 0)
{
totalChunks++;
}
var counter = 0;
for (var i = 0; i < message.Length; i += _chunkSize)
{
var data = new int[Math.Min(_chunkSize, message.Length - i)];
Buffer.BlockCopy(message, i * sizeof(int), data, 0, data.Length * sizeof(int));
messageList.Add(counter == totalChunks - 1
? new PipelineMessage<int[]>(data, true)
: new PipelineMessage<int[]>(data, false));
counter++;
}
return messageList;
}
public int[] FullMessage(List<PipelineMessage<int[]>> pipelineMessage)
{
var size = pipelineMessage.Select(x => x.Data.Length).Sum();
var data = new int[size];
var offset = 0;
foreach (var message in pipelineMessage)
{
Buffer.BlockCopy(message.Data, 0, data, offset, message.Data.Length * sizeof(int));
offset += message.Data.Length * sizeof(int);
}
return data;
}
}
}
}
|
swlsw/incubator-reef
|
lang/cs/Org.Apache.REEF.IMRU.Tests/MapInputWithControlMessageTests.cs
|
C#
|
apache-2.0
| 8,037 | 40.210256 | 119 | 0.6285 | false |
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service;
import com.thoughtworks.go.config.GoConfigDao;
import com.thoughtworks.go.config.exceptions.ConflictException;
import com.thoughtworks.go.config.exceptions.EntityType;
import com.thoughtworks.go.config.exceptions.RecordNotFoundException;
import com.thoughtworks.go.domain.AccessToken;
import com.thoughtworks.go.helper.ConfigFileFixture;
import com.thoughtworks.go.server.dao.AccessTokenSqlMapDao;
import com.thoughtworks.go.server.dao.DatabaseAccessHelper;
import com.thoughtworks.go.util.GoConfigFileHelper;
import com.thoughtworks.go.server.exceptions.InvalidAccessTokenException;
import com.thoughtworks.go.server.exceptions.RevokedAccessTokenException;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatCode;
import static org.junit.jupiter.api.Assertions.assertThrows;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:WEB-INF/applicationContext-global.xml",
"classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
"classpath:testPropertyConfigurer.xml",
"classpath:WEB-INF/spring-all-servlet.xml",
})
public class AccessTokenServiceIntegrationTest {
@Autowired
private DatabaseAccessHelper dbHelper;
@Autowired
private GoConfigService goConfigService;
@Autowired
private GoConfigDao goConfigDao;
@Autowired
private AccessTokenService accessTokenService;
@Autowired
private AccessTokenSqlMapDao accessTokenSqlMapDao;
private String authConfigId;
private GoConfigFileHelper configHelper;
@Before
public void setUp() throws Exception {
dbHelper.onSetUp();
authConfigId = "auth-config-1";
String content = ConfigFileFixture.configWithSecurity("<security>\n" +
" <authConfigs>\n" +
" <authConfig id=\"9cad79b0-4d9e-4a62-829c-eb4d9488062f\" pluginId=\"cd.go.authentication.passwordfile\">\n" +
" <property>\n" +
" <key>PasswordFilePath</key>\n" +
" <value>../manual-testing/ant_hg/password.properties</value>\n" +
" </property>\n" +
" </authConfig>\n" +
" </authConfigs>" +
"</security>");
configHelper = new GoConfigFileHelper(content);
configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
configHelper.onSetUp();
goConfigService.forceNotifyListeners();
}
@After
public void tearDown() throws Exception {
dbHelper.onTearDown();
}
@Test
public void shouldCreateAnAccessToken() {
String tokenDescription = "This is my first token";
AccessToken.AccessTokenWithDisplayValue createdToken = accessTokenService.create(tokenDescription, "bob", authConfigId);
AccessToken fetchedToken = accessTokenService.find(createdToken.getId(), "bob");
assertThat(createdToken.getDescription()).isEqualTo(tokenDescription);
assertThat(createdToken.getValue()).isNotNull();
assertThat(createdToken.getDisplayValue()).isNotNull();
assertThat(createdToken.getCreatedAt()).isNotNull();
assertThat(createdToken.getLastUsed()).isNull();
assertThat(createdToken.isRevoked()).isFalse();
assertThat(fetchedToken.getValue()).isEqualTo(createdToken.getValue());
assertThat(fetchedToken.getDescription()).isEqualTo(createdToken.getDescription());
assertThat(fetchedToken.getCreatedAt()).isEqualTo(createdToken.getCreatedAt());
assertThat(fetchedToken.getLastUsed()).isNull();
assertThat(fetchedToken.isRevoked()).isEqualTo(createdToken.isRevoked());
}
@Test
public void shouldGetAccessTokenProvidedTokenValue() {
String tokenDescription = "This is my first Token";
AccessToken.AccessTokenWithDisplayValue createdToken = accessTokenService.create(tokenDescription, "bob", authConfigId);
String accessTokenInString = createdToken.getDisplayValue();
AccessToken fetchedToken = accessTokenService.findByAccessToken(accessTokenInString);
assertThat(fetchedToken).isEqualTo(createdToken);
}
@Test
public void shouldFailToGetAccessTokenWhenProvidedTokenLengthIsNotEqualTo40() {
InvalidAccessTokenException exception = assertThrows(InvalidAccessTokenException.class, () -> accessTokenService.findByAccessToken("my-access-token"));
assertThat("Invalid Personal Access Token.").isEqualTo(exception.getMessage());
}
@Test
public void shouldFailToGetAccessTokenWhenProvidedTokenContainsInvalidSaltId() {
String accessToken = RandomStringUtils.randomAlphanumeric(40);
InvalidAccessTokenException exception = assertThrows(InvalidAccessTokenException.class, () -> accessTokenService.findByAccessToken(accessToken));
assertThat("Invalid Personal Access Token.").isEqualTo(exception.getMessage());
}
@Test
public void shouldFailToGetAccessTokenWhenProvidedTokenHashEqualityFails() {
String tokenDescription = "This is my first Token";
AccessToken.AccessTokenWithDisplayValue createdToken = accessTokenService.create(tokenDescription, "bob", authConfigId);
String accessTokenInString = createdToken.getDisplayValue();
//replace last 5 characters to make the current token invalid
String invalidAccessToken = StringUtils.replace(accessTokenInString, accessTokenInString.substring(35), "abcde");
InvalidAccessTokenException exception = assertThrows(InvalidAccessTokenException.class, () -> accessTokenService.findByAccessToken(invalidAccessToken));
assertThat("Invalid Personal Access Token.").isEqualTo(exception.getMessage());
}
@Test
public void shouldNotGetAccessTokenProvidedTokenValueWhenTokenIsRevoked() {
String tokenDescription = "This is my first Token";
AccessToken.AccessTokenWithDisplayValue createdToken = accessTokenService.create(tokenDescription, "bob", authConfigId);
accessTokenService.revokeAccessToken(createdToken.getId(), "BOB", null);
String accessTokenInString = createdToken.getDisplayValue();
RevokedAccessTokenException exception = assertThrows(RevokedAccessTokenException.class, () -> accessTokenService.findByAccessToken(accessTokenInString));
assertThat(exception.getMessage()).startsWith("Invalid Personal Access Token. Access token was revoked at: ");
}
@Test
public void shouldRevokeAnAccessToken() {
String tokenDescription = "This is my first Token";
AccessToken createdToken = accessTokenService.create(tokenDescription, "BOB", authConfigId);
assertThat(createdToken.isRevoked()).isFalse();
assertThat(createdToken.getRevokeCause()).isBlank();
accessTokenService.revokeAccessToken(createdToken.getId(), "bob", "blah");
AccessToken tokenAfterRevoking = accessTokenService.find(createdToken.getId(), "bOb");
assertThat(tokenAfterRevoking.isRevoked()).isTrue();
assertThat(tokenAfterRevoking.getRevokeCause()).isEqualTo("blah");
}
@Test
public void shouldFailToRevokeAnAlreadyRevokedAccessToken() {
String tokenDescription = "This is my first Token";
AccessToken createdToken = accessTokenService.create(tokenDescription, "BOB", authConfigId);
assertThat(createdToken.isRevoked()).isFalse();
accessTokenService.revokeAccessToken(createdToken.getId(), "bOb", null);
AccessToken tokenAfterRevoking = accessTokenService.find(createdToken.getId(), "bOb");
assertThat(tokenAfterRevoking.isRevoked()).isTrue();
assertThatCode(() -> accessTokenService.revokeAccessToken(createdToken.getId(), "bOb", null))
.isInstanceOf(ConflictException.class)
.hasMessage("Access token has already been revoked!");
}
@Test
public void shouldFailToRevokeNonExistingAccessToken() {
long id = 42;
assertThatCode(() -> accessTokenService.revokeAccessToken(id, "bOb", null))
.isEqualTo(new RecordNotFoundException(EntityType.AccessToken, 42));
}
@Test
public void shouldUpdateLastUsedTimeToDB() {
AccessToken createdToken = accessTokenService.create("This is my first Token", "BOB", authConfigId);
final AccessToken fetchedFromDB = accessTokenService.find(createdToken.getId(), createdToken.getUsername());
assertThat(fetchedFromDB.getLastUsed()).isNull();
accessTokenService.updateLastUsedCacheWith(createdToken);
accessTokenService.onTimer();
final AccessToken accessToken = accessTokenService.find(createdToken.getId(), createdToken.getUsername());
assertThat(accessToken.getLastUsed()).isNotNull();
}
}
|
varshavaradarajan/gocd
|
server/src/test-integration/java/com/thoughtworks/go/server/service/AccessTokenServiceIntegrationTest.java
|
Java
|
apache-2.0
| 9,904 | 44.223744 | 161 | 0.732229 | false |
/**
* JBoss, Home of Professional Open Source
* Copyright Red Hat, Inc., and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.aerogear.unifiedpush.jpa.dao.impl;
import java.util.List;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import javax.persistence.LockModeType;
import javax.persistence.TypedQuery;
import org.hibernate.Session;
import org.jboss.aerogear.unifiedpush.dao.GenericBaseDao;
public abstract class JPABaseDao<T, K> implements GenericBaseDao<T, K> {
@Inject
protected EntityManager entityManager;
/**
* Hook to manually inject an EntityManager.
*
* @param entityManager the EntityManager for this DAO class
*/
public void setEntityManager(EntityManager entityManager) {
this.entityManager = entityManager;
}
protected TypedQuery<T> createQuery(String jpql) {
return entityManager.createQuery(jpql, getType());
}
protected Query createUntypedQuery(String jpql) {
return entityManager.createQuery(jpql);
}
protected <O> TypedQuery<O> createQuery(String jpql, Class<O> type) {
return entityManager.createQuery(jpql, type);
}
protected org.hibernate.Query createHibernateQuery(String hql) {
Session session = (Session) entityManager.getDelegate();
return session.createQuery(hql);
}
    // Subclasses must supply the concrete entity class here because Java's
    // type erasure prevents this base class from writing T.class.
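    // Illustrative only: a DAO for a hypothetical MyEntity would implement
    // this as "return MyEntity.class;".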
public abstract Class<T> getType();
@Override
public T find(K id) {
return entityManager.find(getType(), id);
}
@Override
public void create(T entity) {
entityManager.persist(entity);
}
@Override
public void update(T entity) {
entityManager.merge(entity);
entityManager.flush();
}
/**
* Pessimistic write lock on entity
*/
@Override
public void lock(T entity) {
entityManager.lock(entity, LockModeType.PESSIMISTIC_WRITE);
}
@Override
public void delete(T entity) {
if (entity != null) {
            // making sure the entity in question
            // is really part of this transaction
if (! entityManager.contains(entity)) {
final Object entityId = entityManager.getEntityManagerFactory().getPersistenceUnitUtil().getIdentifier(entity);
entity = entityManager.getReference(getType(), entityId);
}
entityManager.remove(entity);
}
}
/**
* Write pending objects to the database and
* clear session-scoped cache
*/
@Override
public void flushAndClear() {
entityManager.flush();
entityManager.clear();
}
protected T getSingleResultForQuery(TypedQuery<T> query) {
List<T> result = query.getResultList();
if (!result.isEmpty()) {
return result.get(0);
} else {
return null;
}
}
}
|
diogoalbuquerque/aerogear-unifiedpush-server
|
model/jpa/src/main/java/org/jboss/aerogear/unifiedpush/jpa/dao/impl/JPABaseDao.java
|
Java
|
apache-2.0
| 3,473 | 27.467213 | 127 | 0.663691 | false |
/*
* Copyright 2013 Cloudera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kitesdk.data.spi.predicates;
import com.google.common.base.Objects;
import javax.annotation.Nullable;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.junit.Assert;
import org.junit.Test;
import static org.apache.avro.generic.GenericData.Record;
public class TestRegisteredPredicateToFromString {
private static final Schema SCHEMA = SchemaBuilder.record("Event").fields()
.requiredString("id")
.requiredLong("timestamp")
.requiredString("color")
.endRecord();
private static final Schema STRING = Schema.create(Schema.Type.STRING);
@Test
public void testExists() {
Exists<Record> exists = Predicates.exists();
Assert.assertEquals("", exists.toString(SCHEMA));
Assert.assertEquals(
"exists()", RegisteredPredicate.toString(exists, SCHEMA));
Assert.assertEquals(
exists, RegisteredPredicate.<Record>fromString("exists()", SCHEMA));
}
/**
* A test RegisteredPredicate. Do not use this class elsewhere because it
* uses toString rather than supporting CharSequences directly.
*/
public static class Contains<T> extends RegisteredPredicate<T> {
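    // The static initializer below registers this predicate under the name
    // "contains" so that RegisteredPredicate.fromString can rebuild it from
    // its serialized "contains(...)" form.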
static {
RegisteredPredicate.register("contains", new Factory() {
@Override
public <T> RegisteredPredicate<T> fromString(String predicate, Schema schema) {
return new Contains<T>(predicate);
}
});
}
private final String contained;
public Contains(String contained) {
this.contained = contained;
}
@Override
public String getName() {
return "contains";
}
@Override
public String toString(Schema schema) {
return contained;
}
@Override
public boolean apply(@Nullable T value) {
return value != null && value.toString().contains(contained);
}
@Override
public int hashCode() {
return Objects.hashCode(contained);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
Contains other = (Contains) obj;
return Objects.equal(contained, other.contained);
}
}
public static Contains<String> contains(String contained) {
return new Contains<String>(contained);
}
@Test
public void testContains() {
Contains<String> a = contains("a");
Contains<String> b = contains("b");
Assert.assertEquals("Should wrap delegate toString in name function",
"contains(a)", RegisteredPredicate.toString(a, STRING));
Assert.assertEquals("Should wrap delegate toString in name function",
"contains(b)", RegisteredPredicate.toString(b, STRING));
Assert.assertEquals("Should produce equivalent contains(a)",
a, RegisteredPredicate.<String>fromString("contains(a)", STRING));
}
}
|
dlanza1/kite
|
kite-data/kite-data-core/src/test/java/org/kitesdk/data/spi/predicates/TestRegisteredPredicateToFromString.java
|
Java
|
apache-2.0
| 3,485 | 29.043103 | 87 | 0.684075 | false |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_20) on Wed Mar 30 21:34:43 CST 2011 -->
<TITLE>
Uses of Class org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater (Facebook's realtime distributed database, powered by Apache Hadoop based on 0.20-append branch 0.20.1-dev API)
</TITLE>
<META NAME="date" CONTENT="2011-03-30">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater (Facebook's realtime distributed database, powered by Apache Hadoop based on 0.20-append branch 0.20.1-dev API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.html" title="class in org.apache.hadoop.io.compress.zlib"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../../index.html?org/apache/hadoop/io/compress/zlib//class-useBuiltInZlibDeflater.html" target="_top"><B>FRAMES</B></A>
<A HREF="BuiltInZlibDeflater.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater</B></H2>
</CENTER>
No usage of org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.html" title="class in org.apache.hadoop.io.compress.zlib"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../../index.html?org/apache/hadoop/io/compress/zlib//class-useBuiltInZlibDeflater.html" target="_top"><B>FRAMES</B></A>
<A HREF="BuiltInZlibDeflater.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
This release is based on the Facebook's version of Hadoop.<br>Copyright © 2009 The Apache Software Foundation.
</BODY>
</HTML>
|
submergerock/avatar-hadoop
|
docs/api/org/apache/hadoop/io/compress/zlib/class-use/BuiltInZlibDeflater.html
|
HTML
|
apache-2.0
| 6,505 | 44.173611 | 251 | 0.620753 | false |
/*
* Copyright (C) 2014 Dell, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dell.doradus.common;
import java.io.StringWriter;
import java.util.Map;
import java.util.Stack;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.AttributesImpl;
/**
* Creates XML documents by writing to a StringWriter.
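 * <p>
 * A minimal usage sketch (illustrative only; element and attribute names are arbitrary):
 * <pre>
 *   XMLBuilder xml = new XMLBuilder(2);
 *   xml.startDocument();
 *   xml.startElement("doc");
 *   xml.addDataElement("greeting", "hello", "lang", "en");
 *   xml.endElement();
 *   xml.endDocument();
 *   String text = xml.toString();
 * </pre>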
*/
final public class XMLBuilder {
private final static Attributes EMPTY_ATTS = new AttributesImpl();
private final Stack<String> m_tagStack = new Stack<String>();
private final StringWriter m_stringWriter = new StringWriter();
private final int m_indent;
private final String m_prefix;
    // Default constructor creates an XMLBuilder that generates unformatted XML.
    // Use the XMLBuilder(indent) constructor to generate formatted XML.
public XMLBuilder(){
this(0);
}
    // Creates an XMLBuilder that generates formatted XML
public XMLBuilder(int indent) {
m_indent = indent;
StringBuilder buffer = new StringBuilder();
for (int index = 0; index < indent; index++) {
buffer.append(' ');
}
m_prefix = buffer.toString();
}
// Start a new XML document.
public void startDocument() {
m_stringWriter.write("<?xml version=\"1.0\" standalone=\"yes\"?>\n");
m_tagStack.clear();
} // startDocument
// Finish the current XML document.
public void endDocument() {
if (m_tagStack.size() != 0) {
throw new RuntimeException("XML 'endDocument' with unfinished tags");
}
m_stringWriter.write('\n');
} // endDocument
// Start a new XML element using the given start tag only.
public void startElement(String elemName) {
writeStartElement(elemName, EMPTY_ATTS);
m_tagStack.push(elemName);
} // startElement
// Start a new XML element using the given start tag and single attribute.
public void startElement(String elemName, String attrName, String attrValue) {
AttributesImpl attrs = new AttributesImpl();
attrs.addAttribute("", attrName, "", "CDATA", attrValue);
writeStartElement(elemName, attrs);
m_tagStack.push(elemName);
} // startElement
// Start a new XML element using the given name and attribute set.
public void startElement(String elemName, Attributes attrs) {
writeStartElement(elemName, attrs);
m_tagStack.push(elemName);
} // startElement
// Same as above but using an attribute map.
public void startElement(String elemName, Map<String,String> attrs) {
startElement(elemName, toAttributes(attrs));
} // startElement
// Finish the outer-most XML element that was started.
public void endElement() {
if (m_tagStack.size() == 0) {
throw new RuntimeException("XML 'endElement' with no unfinished tags");
}
writeEndElement(m_tagStack.pop());
} // endElement
// Add an element, including start and end tags, with the content as text data within.
public void addDataElement(String elemName, String content) {
writeDataElement(elemName, EMPTY_ATTS, content);
} // addDataElement
// Same as above but with a single attribute name/value pair as well.
public void addDataElement(String elemName, String content, String attrName, String attrValue) {
AttributesImpl attrs = new AttributesImpl();
attrs.addAttribute("", attrName, "", "CDATA", attrValue);
writeDataElement(elemName, attrs, content);
} // addDataElement
// Same as above but using an attribute set.
public void addDataElement(String elemName, String content, Attributes attrs) {
writeDataElement(elemName, attrs, content);
} // addDataElement
// Same as above but using an attribute map.
public void addDataElement(String elemName, String content, Map<String,String> attrs) {
addDataElement(elemName, content, toAttributes(attrs));
} // addDataElement
@Override
public String toString() {
if (m_tagStack.size() != 0) {
throw new RuntimeException("Stack is not empty");
}
return m_stringWriter.toString();
} // toString
// Converts attribute map to Attributes class instance.
private Attributes toAttributes(Map<String,String> attrs){
AttributesImpl impl = new AttributesImpl();
for (Map.Entry<String,String> attr : attrs.entrySet()){
impl.addAttribute("", attr.getKey(), "", "CDATA", attr.getValue());
}
return impl;
} // toAttributes
private void writeStartElement(String elemName, Attributes atts) {
for (int level = 0; level < m_tagStack.size(); level++) {
m_stringWriter.write(m_prefix);
}
m_stringWriter.write('<');
m_stringWriter.write(elemName);
writeAttributes(atts);
m_stringWriter.write('>');
if (m_indent > 0) {
m_stringWriter.write('\n');
}
}
private void writeEndElement(String elemName) {
for (int level = 0; level < m_tagStack.size(); level++) {
m_stringWriter.write(m_prefix);
}
m_stringWriter.write("</");
m_stringWriter.write(elemName);
m_stringWriter.write('>');
if (m_indent > 0) {
m_stringWriter.write('\n');
}
}
private void writeDataElement(String elemName, Attributes atts, String content) {
startElement(elemName, atts);
writeCharacters(content);
endElement();
}
private void writeAttributes(Attributes atts) {
for (int i = 0; i < atts.getLength(); i++) {
char value[] = atts.getValue(i).toCharArray();
m_stringWriter.write(' ');
m_stringWriter.write(atts.getLocalName(i));
m_stringWriter.write("=\"");
writeEscaped(value, 0, value.length, true);
m_stringWriter.write('"');
}
}
public void writeCharacters(String data) {
for (int level = 0; level < m_tagStack.size(); level++) {
m_stringWriter.write(m_prefix);
}
char ch[] = data.toCharArray();
writeCharacters(ch, 0, ch.length);
if (m_indent > 0) {
m_stringWriter.write('\n');
}
}
public void writeCharacters(char ch[], int start, int len) {
writeEscaped(ch, start, len, false);
}
private void writeEscaped(char ch[], int start, int length, boolean isAttVal) {
for (int i = start; i < start + length; i++) {
switch (ch[i]) {
case '&':
                m_stringWriter.write("&amp;");
break;
case '<':
                m_stringWriter.write("&lt;");
break;
case '>':
                m_stringWriter.write("&gt;");
break;
case '\"':
if (isAttVal) {
                    m_stringWriter.write("&quot;");
} else {
m_stringWriter.write('\"');
}
break;
default:
if (ch[i] > '\u007f') {
m_stringWriter.write("&#");
m_stringWriter.write(Integer.toString(ch[i]));
m_stringWriter.write(';');
} else {
m_stringWriter.write(ch[i]);
}
}
}
}
} // class XMLBuilder
|
kod3r/Doradus
|
doradus-common/src/main/java/com/dell/doradus/common/XMLBuilder.java
|
Java
|
apache-2.0
| 8,264 | 34.566372 | 100 | 0.580227 | false |
define([
'../../Core/defaultValue',
'../../Core/defined',
'../../Core/defineProperties',
'../../Core/DeveloperError',
'../../Core/Event',
'../../Core/wrapFunction',
'../../DataSources/CzmlDataSource',
'../../DataSources/GeoJsonDataSource',
'../../DataSources/KmlDataSource',
'../getElement'
], function(
defaultValue,
defined,
defineProperties,
DeveloperError,
Event,
wrapFunction,
CzmlDataSource,
GeoJsonDataSource,
KmlDataSource,
getElement) {
'use strict';
/**
     * A mixin which adds default drag and drop support for CZML, GeoJSON, and KML/KMZ files to the Viewer widget.
* Rather than being called directly, this function is normally passed as
* a parameter to {@link Viewer#extend}, as shown in the example below.
* @exports viewerDragDropMixin
*
* @param {Viewer} viewer The viewer instance.
* @param {Object} [options] Object with the following properties:
* @param {Element|String} [options.dropTarget=viewer.container] The DOM element which will serve as the drop target.
* @param {Boolean} [options.clearOnDrop=true] When true, dropping files will clear all existing data sources first, when false, new data sources will be loaded after the existing ones.
* @param {Boolean} [options.flyToOnDrop=true] When true, dropping files will fly to the data source once it is loaded.
* @param {Boolean} [options.clampToGround=true] When true, datasources are clamped to the ground.
* @param {DefaultProxy} [options.proxy] The proxy to be used for KML network links.
*
* @exception {DeveloperError} Element with id <options.dropTarget> does not exist in the document.
* @exception {DeveloperError} dropTarget is already defined by another mixin.
* @exception {DeveloperError} dropEnabled is already defined by another mixin.
* @exception {DeveloperError} dropError is already defined by another mixin.
* @exception {DeveloperError} clearOnDrop is already defined by another mixin.
*
* @example
* // Add basic drag and drop support and pop up an alert window on error.
* var viewer = new Cesium.Viewer('cesiumContainer');
* viewer.extend(Cesium.viewerDragDropMixin);
* viewer.dropError.addEventListener(function(viewerArg, source, error) {
* window.alert('Error processing ' + source + ':' + error);
* });
*/
function viewerDragDropMixin(viewer, options) {
//>>includeStart('debug', pragmas.debug);
if (!defined(viewer)) {
throw new DeveloperError('viewer is required.');
}
if (viewer.hasOwnProperty('dropTarget')) {
throw new DeveloperError('dropTarget is already defined by another mixin.');
}
if (viewer.hasOwnProperty('dropEnabled')) {
throw new DeveloperError('dropEnabled is already defined by another mixin.');
}
if (viewer.hasOwnProperty('dropError')) {
throw new DeveloperError('dropError is already defined by another mixin.');
}
if (viewer.hasOwnProperty('clearOnDrop')) {
throw new DeveloperError('clearOnDrop is already defined by another mixin.');
}
if (viewer.hasOwnProperty('flyToOnDrop')) {
throw new DeveloperError('flyToOnDrop is already defined by another mixin.');
}
//>>includeEnd('debug');
options = defaultValue(options, defaultValue.EMPTY_OBJECT);
//Local variables to be closed over by defineProperties.
var dropEnabled = true;
var flyToOnDrop = defaultValue(options.flyToOnDrop, true);
var dropError = new Event();
var clearOnDrop = defaultValue(options.clearOnDrop, true);
var dropTarget = defaultValue(options.dropTarget, viewer.container);
var clampToGround = defaultValue(options.clampToGround, true);
var proxy = options.proxy;
dropTarget = getElement(dropTarget);
defineProperties(viewer, {
/**
* Gets or sets the element to serve as the drop target.
* @memberof viewerDragDropMixin.prototype
* @type {Element}
*/
dropTarget : {
//TODO See https://github.com/AnalyticalGraphicsInc/cesium/issues/832
get : function() {
return dropTarget;
},
set : function(value) {
//>>includeStart('debug', pragmas.debug);
if (!defined(value)) {
throw new DeveloperError('value is required.');
}
//>>includeEnd('debug');
unsubscribe(dropTarget, handleDrop);
dropTarget = value;
subscribe(dropTarget, handleDrop);
}
},
/**
* Gets or sets a value indicating if drag and drop support is enabled.
* @memberof viewerDragDropMixin.prototype
* @type {Element}
*/
dropEnabled : {
get : function() {
return dropEnabled;
},
set : function(value) {
if (value !== dropEnabled) {
if (value) {
subscribe(dropTarget, handleDrop);
} else {
unsubscribe(dropTarget, handleDrop);
}
dropEnabled = value;
}
}
},
/**
* Gets the event that will be raised when an error is encountered during drop processing.
* @memberof viewerDragDropMixin.prototype
* @type {Event}
*/
dropError : {
get : function() {
return dropError;
}
},
/**
* Gets or sets a value indicating if existing data sources should be cleared before adding the newly dropped sources.
* @memberof viewerDragDropMixin.prototype
* @type {Boolean}
*/
clearOnDrop : {
get : function() {
return clearOnDrop;
},
set : function(value) {
clearOnDrop = value;
}
},
/**
* Gets or sets a value indicating if the camera should fly to the data source after it is loaded.
* @memberof viewerDragDropMixin.prototype
* @type {Boolean}
*/
flyToOnDrop : {
get : function() {
return flyToOnDrop;
},
set : function(value) {
flyToOnDrop = value;
}
},
/**
* Gets or sets the proxy to be used for KML.
* @memberof viewerDragDropMixin.prototype
* @type {DefaultProxy}
*/
proxy : {
get : function() {
return proxy;
},
set : function(value) {
proxy = value;
}
},
/**
* Gets or sets a value indicating if the datasources should be clamped to the ground
* @memberof viewerDragDropMixin.prototype
* @type {Boolean}
*/
clampToGround : {
get : function() {
return clampToGround;
},
set : function(value) {
clampToGround = value;
}
}
});
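        // Drop handler: optionally clears existing entities and data sources,
        // then reads each dropped file as text and hands it to the per-file
        // onload/onerror callbacks created below.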
function handleDrop(event) {
stop(event);
if (clearOnDrop) {
viewer.entities.removeAll();
viewer.dataSources.removeAll();
}
var files = event.dataTransfer.files;
var length = files.length;
for (var i = 0; i < length; i++) {
var file = files[i];
var reader = new FileReader();
reader.onload = createOnLoadCallback(viewer, file, proxy, clampToGround);
reader.onerror = createDropErrorCallback(viewer, file);
reader.readAsText(file);
}
}
        //Enable drop by default.
subscribe(dropTarget, handleDrop);
//Wrap the destroy function to make sure all events are unsubscribed from
viewer.destroy = wrapFunction(viewer, viewer.destroy, function() {
viewer.dropEnabled = false;
});
//Specs need access to handleDrop
viewer._handleDrop = handleDrop;
}
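    //Illustrative sketch (not part of the original source): after the mixin has been added via
    //viewer.extend(viewerDragDropMixin, options), the properties defined above can be adjusted
    //at runtime; 'map' below is a hypothetical element id used only for illustration.
    //
    //    viewer.dropEnabled = false;                          // temporarily ignore drops
    //    viewer.dropTarget = document.getElementById('map');  // retarget a different element
    //    viewer.clampToGround = false;                        // load KML/GeoJSON without clamping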
function stop(event) {
event.stopPropagation();
event.preventDefault();
}
function unsubscribe(dropTarget, handleDrop) {
var currentTarget = dropTarget;
if (defined(currentTarget)) {
currentTarget.removeEventListener('drop', handleDrop, false);
currentTarget.removeEventListener('dragenter', stop, false);
currentTarget.removeEventListener('dragover', stop, false);
currentTarget.removeEventListener('dragexit', stop, false);
}
}
function subscribe(dropTarget, handleDrop) {
dropTarget.addEventListener('drop', handleDrop, false);
dropTarget.addEventListener('dragenter', stop, false);
dropTarget.addEventListener('dragover', stop, false);
dropTarget.addEventListener('dragexit', stop, false);
}
function createOnLoadCallback(viewer, file, proxy, clampToGround) {
var scene = viewer.scene;
return function(evt) {
var fileName = file.name;
try {
var loadPromise;
if (/\.czml$/i.test(fileName)) {
loadPromise = CzmlDataSource.load(JSON.parse(evt.target.result), {
sourceUri : fileName
});
} else if (/\.geojson$/i.test(fileName) || /\.json$/i.test(fileName) || /\.topojson$/i.test(fileName)) {
loadPromise = GeoJsonDataSource.load(JSON.parse(evt.target.result), {
sourceUri : fileName,
clampToGround : clampToGround
});
} else if (/\.(kml|kmz)$/i.test(fileName)) {
loadPromise = KmlDataSource.load(file, {
sourceUri : fileName,
proxy : proxy,
camera : scene.camera,
canvas : scene.canvas,
clampToGround: clampToGround
});
} else {
viewer.dropError.raiseEvent(viewer, fileName, 'Unrecognized file: ' + fileName);
return;
}
if (defined(loadPromise)) {
viewer.dataSources.add(loadPromise).then(function(dataSource) {
if (viewer.flyToOnDrop) {
viewer.flyTo(dataSource);
}
}).otherwise(function(error) {
viewer.dropError.raiseEvent(viewer, fileName, error);
});
}
} catch (error) {
viewer.dropError.raiseEvent(viewer, fileName, error);
}
};
}
function createDropErrorCallback(viewer, file) {
return function(evt) {
viewer.dropError.raiseEvent(viewer, file.name, evt.target.error);
};
}
return viewerDragDropMixin;
});
|
soceur/cesium
|
Source/Widgets/Viewer/viewerDragDropMixin.js
|
JavaScript
|
apache-2.0
| 11,834 | 37.8 | 189 | 0.5289 | false |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.6.0_36) on Mon Jan 04 20:30:30 CST 2016 -->
<title>TaskMessageSerializer</title>
<meta name="date" content="2016-01-04">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="TaskMessageSerializer";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../io/gearpump/streaming/task/TaskInterface.html" title="interface in io.gearpump.streaming.task"><span class="strong">PREV CLASS</span></a></li>
<li><a href="../../../../io/gearpump/streaming/task/TaskUtil.html" title="class in io.gearpump.streaming.task"><span class="strong">NEXT CLASS</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?io/gearpump/streaming/task/TaskMessageSerializer.html" target="_top">FRAMES</a></li>
<li><a href="TaskMessageSerializer.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>SUMMARY: </li>
<li>NESTED | </li>
<li>FIELD | </li>
<li>CONSTR | </li>
<li><a href="#method_summary">METHOD</a></li>
</ul>
<ul class="subNavList">
<li>DETAIL: </li>
<li>FIELD | </li>
<li>CONSTR | </li>
<li><a href="#method_detail">METHOD</a></li>
</ul>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<p class="subTitle">io.gearpump.streaming.task</p>
<h2 title="Interface TaskMessageSerializer" class="title">Interface TaskMessageSerializer<T></h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Known Implementing Classes:</dt>
<dd><a href="../../../../io/gearpump/streaming/AckRequestSerializer.html" title="class in io.gearpump.streaming">AckRequestSerializer</a>, <a href="../../../../io/gearpump/streaming/AckSerializer.html" title="class in io.gearpump.streaming">AckSerializer</a>, <a href="../../../../io/gearpump/streaming/InitialAckRequestSerializer.html" title="class in io.gearpump.streaming">InitialAckRequestSerializer</a>, <a href="../../../../io/gearpump/streaming/LatencyProbeSerializer.html" title="class in io.gearpump.streaming">LatencyProbeSerializer</a>, <a href="../../../../io/gearpump/streaming/task/SerializedMessageSerializer.html" title="class in io.gearpump.streaming.task">SerializedMessageSerializer</a>, <a href="../../../../io/gearpump/streaming/TaskIdSerializer.html" title="class in io.gearpump.streaming">TaskIdSerializer</a></dd>
</dl>
<hr>
<br>
<pre>public interface <strong>TaskMessageSerializer<T></strong></pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method_summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span>Methods</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><code><strong><a href="../../../../io/gearpump/streaming/task/TaskMessageSerializer.html#getLength(T)">getLength</a></strong>(<a href="../../../../io/gearpump/streaming/task/TaskMessageSerializer.html" title="type parameter in TaskMessageSerializer">T</a> obj)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../io/gearpump/streaming/task/TaskMessageSerializer.html" title="type parameter in TaskMessageSerializer">T</a></code></td>
<td class="colLast"><code><strong><a href="../../../../io/gearpump/streaming/task/TaskMessageSerializer.html#read(java.io.DataInput)">read</a></strong>(java.io.DataInput dataInput)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><strong><a href="../../../../io/gearpump/streaming/task/TaskMessageSerializer.html#write(java.io.DataOutput, T)">write</a></strong>(java.io.DataOutput dataOutput,
<a href="../../../../io/gearpump/streaming/task/TaskMessageSerializer.html" title="type parameter in TaskMessageSerializer">T</a> obj)</code> </td>
</tr>
</table>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method_detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="write(java.io.DataOutput,java.lang.Object)">
<!-- -->
</a><a name="write(java.io.DataOutput, T)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>write</h4>
<pre>void write(java.io.DataOutput dataOutput,
<a href="../../../../io/gearpump/streaming/task/TaskMessageSerializer.html" title="type parameter in TaskMessageSerializer">T</a> obj)</pre>
</li>
</ul>
<a name="read(java.io.DataInput)">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>read</h4>
<pre><a href="../../../../io/gearpump/streaming/task/TaskMessageSerializer.html" title="type parameter in TaskMessageSerializer">T</a> read(java.io.DataInput dataInput)</pre>
</li>
</ul>
<a name="getLength(java.lang.Object)">
<!-- -->
</a><a name="getLength(T)">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>getLength</h4>
<pre>int getLength(<a href="../../../../io/gearpump/streaming/task/TaskMessageSerializer.html" title="type parameter in TaskMessageSerializer">T</a> obj)</pre>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
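<!-- Illustrative sketch (not part of the generated javadoc): a typical implementation writes a
     fixed-length binary form, e.g. for an int payload:

     public class IntSerializer implements TaskMessageSerializer<Integer> {
       public void write(java.io.DataOutput out, Integer obj) {
         try { out.writeInt(obj); } catch (java.io.IOException e) { throw new RuntimeException(e); }
       }
       public Integer read(java.io.DataInput in) {
         try { return in.readInt(); } catch (java.io.IOException e) { throw new RuntimeException(e); }
       }
       public int getLength(Integer obj) { return 4; }
     }
-->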
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../io/gearpump/streaming/task/TaskInterface.html" title="interface in io.gearpump.streaming.task"><span class="strong">PREV CLASS</span></a></li>
<li><a href="../../../../io/gearpump/streaming/task/TaskUtil.html" title="class in io.gearpump.streaming.task"><span class="strong">NEXT CLASS</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?io/gearpump/streaming/task/TaskMessageSerializer.html" target="_top">FRAMES</a></li>
<li><a href="TaskMessageSerializer.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>SUMMARY: </li>
<li>NESTED | </li>
<li>FIELD | </li>
<li>CONSTR | </li>
<li><a href="#method_summary">METHOD</a></li>
</ul>
<ul class="subNavList">
<li>DETAIL: </li>
<li>FIELD | </li>
<li>CONSTR | </li>
<li><a href="#method_detail">METHOD</a></li>
</ul>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
|
gearpump/gearpump.github.io
|
releases/0.7.3/api/java/io/gearpump/streaming/task/TaskMessageSerializer.html
|
HTML
|
apache-2.0
| 9,355 | 37.979167 | 837 | 0.645965 | false |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.elasticsearch.dao;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import org.apache.metron.indexing.dao.RetrieveLatestDao;
import org.apache.metron.indexing.dao.search.GetRequest;
import org.apache.metron.indexing.dao.update.Document;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
public class ElasticsearchRetrieveLatestDao implements RetrieveLatestDao {
private TransportClient transportClient;
public ElasticsearchRetrieveLatestDao(TransportClient transportClient) {
this.transportClient = transportClient;
}
@Override
public Document getLatest(String guid, String sensorType) {
Optional<Document> doc = searchByGuid(guid, sensorType, hit -> toDocument(guid, hit));
return doc.orElse(null);
}
@Override
public Iterable<Document> getAllLatest(List<GetRequest> getRequests) {
Collection<String> guids = new HashSet<>();
Collection<String> sensorTypes = new HashSet<>();
for (GetRequest getRequest : getRequests) {
guids.add(getRequest.getGuid());
sensorTypes.add(getRequest.getSensorType());
}
List<Document> documents = searchByGuids(
guids,
sensorTypes,
hit -> {
Long ts = 0L;
String doc = hit.getSourceAsString();
String sourceType = Iterables.getFirst(Splitter.on("_doc").split(hit.getType()), null);
try {
return Optional.of(new Document(doc, hit.getId(), sourceType, ts));
} catch (IOException e) {
throw new IllegalStateException("Unable to retrieve latest: " + e.getMessage(), e);
}
}
);
return documents;
}
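  // Usage sketch (illustrative only, not part of the original class): assumes a configured
  // TransportClient named "client" and that GetRequest exposes a (guid, sensorType) constructor.
  //
  //   RetrieveLatestDao dao = new ElasticsearchRetrieveLatestDao(client);
  //   Document latest = dao.getLatest("some-guid", "bro");
  //   Iterable<Document> docs = dao.getAllLatest(
  //       Collections.singletonList(new GetRequest("some-guid", "bro")));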
<T> Optional<T> searchByGuid(String guid, String sensorType,
Function<SearchHit, Optional<T>> callback) {
Collection<String> sensorTypes = sensorType != null ? Collections.singleton(sensorType) : null;
List<T> results = searchByGuids(Collections.singleton(guid), sensorTypes, callback);
if (results.size() > 0) {
return Optional.of(results.get(0));
} else {
return Optional.empty();
}
}
  /**
   * Returns the search hits for the given GUIDs and sensor types.
   * A callback can be specified to transform each hit into a type T;
   * hits for which the callback returns an empty Optional are skipped.
   */
<T> List<T> searchByGuids(Collection<String> guids, Collection<String> sensorTypes,
Function<SearchHit, Optional<T>> callback) {
if (guids == null || guids.isEmpty()) {
return Collections.emptyList();
}
    IdsQueryBuilder idsQuery;
    if (sensorTypes != null) {
      String[] types = sensorTypes.stream().map(sensorType -> sensorType + "_doc")
          .toArray(String[]::new);
      idsQuery = QueryBuilders.idsQuery(types);
    } else {
      idsQuery = QueryBuilders.idsQuery();
    }
    // addIds returns the same builder, so the query accumulates every requested GUID
    for (String guid : guids) {
      idsQuery.addIds(guid);
    }
    QueryBuilder query = idsQuery;
SearchRequestBuilder request = transportClient.prepareSearch()
.setQuery(query)
.setSize(guids.size());
org.elasticsearch.action.search.SearchResponse response = request.get();
SearchHits hits = response.getHits();
List<T> results = new ArrayList<>();
for (SearchHit hit : hits) {
Optional<T> result = callback.apply(hit);
if (result.isPresent()) {
results.add(result.get());
}
}
return results;
}
private Optional<Document> toDocument(final String guid, SearchHit hit) {
Long ts = 0L;
String doc = hit.getSourceAsString();
String sourceType = toSourceType(hit.getType());
try {
return Optional.of(new Document(doc, guid, sourceType, ts));
} catch (IOException e) {
throw new IllegalStateException("Unable to retrieve latest: " + e.getMessage(), e);
}
}
/**
* Returns the source type based on a given doc type.
* @param docType The document type.
* @return The source type.
*/
private String toSourceType(String docType) {
return Iterables.getFirst(Splitter.on("_doc").split(docType), null);
}
}
|
cestella/incubator-metron
|
metron-platform/metron-elasticsearch/src/main/java/org/apache/metron/elasticsearch/dao/ElasticsearchRetrieveLatestDao.java
|
Java
|
apache-2.0
| 5,420 | 34.89404 | 99 | 0.700738 | false |
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.cellbase.core.api;
import org.opencb.biodata.models.core.Region;
import org.opencb.biodata.models.core.GenomeSequenceFeature;
import org.opencb.biodata.models.core.GenomicScoreRegion;
import org.opencb.commons.datastore.core.Query;
import org.opencb.commons.datastore.core.QueryOptions;
import org.opencb.commons.datastore.core.QueryParam;
import org.opencb.commons.datastore.core.QueryResult;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import static org.opencb.commons.datastore.core.QueryParam.Type.STRING;
/**
* Created by imedina on 30/11/15.
*/
public interface GenomeDBAdaptor extends CellBaseDBAdaptor {
enum QueryParams implements QueryParam {
REGION("region", STRING, "");
QueryParams(String key, Type type, String description) {
this.key = key;
this.type = type;
this.description = description;
}
private final String key;
private Type type;
private String description;
@Override
public String key() {
return key;
}
@Override
public String description() {
return description;
}
@Override
public Type type() {
return type;
}
}
QueryResult getGenomeInfo(QueryOptions queryOptions);
QueryResult getChromosomeInfo(String chromosomeId, QueryOptions queryOptions);
@Deprecated
QueryResult<GenomeSequenceFeature> getGenomicSequence(Query query, QueryOptions queryOptions);
@Deprecated
default List<QueryResult<GenomeSequenceFeature>> getGenomicSequence(List<Query> queries, QueryOptions queryOptions) {
List<QueryResult<GenomeSequenceFeature>> queryResults = new ArrayList<>(queries.size());
queryResults.addAll(queries.stream().map(query -> getGenomicSequence(query, queryOptions)).collect(Collectors.toList()));
return queryResults;
}
QueryResult<GenomeSequenceFeature> getSequence(Region region, QueryOptions queryOptions);
default List<QueryResult<GenomeSequenceFeature>> getSequence(List<Region> regions, QueryOptions queryOptions) {
List<QueryResult<GenomeSequenceFeature>> queryResults = new ArrayList<>(regions.size());
queryResults.addAll(regions.stream().map(region -> getSequence(region, queryOptions)).collect(Collectors.toList()));
return queryResults;
}
// default QueryResult<ConservationScoreRegion> getConservation(Region region, QueryOptions queryOptions) {
default QueryResult<GenomicScoreRegion<Float>> getConservation(Region region, QueryOptions queryOptions) {
return getConservation(Collections.singletonList(region), queryOptions).get(0);
}
// List<QueryResult<ConservationScoreRegion>> getConservation(List<Region> regions, QueryOptions queryOptions);
List<QueryResult<GenomicScoreRegion<Float>>> getConservation(List<Region> regions, QueryOptions queryOptions);
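    // Usage sketch (illustrative, not part of the original interface): given an implementation
    // "genomeDBAdaptor", and assuming Region exposes a (chromosome, start, end) constructor:
    //
    //   Region region = new Region("13", 32889611, 32973805);
    //   QueryResult<GenomeSequenceFeature> sequence =
    //       genomeDBAdaptor.getSequence(region, new QueryOptions());
    //   QueryResult<GenomicScoreRegion<Float>> conservation =
    //       genomeDBAdaptor.getConservation(region, new QueryOptions());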
}
|
dapregi/cellbase
|
cellbase-core/src/main/java/org/opencb/cellbase/core/api/GenomeDBAdaptor.java
|
Java
|
apache-2.0
| 3,615 | 34.792079 | 129 | 0.730567 | false |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.security.manager;
import java.io.FileDescriptor;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.net.InetAddress;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.CodeSource;
import java.security.Permission;
import java.security.Principal;
import java.security.PrivilegedAction;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.security.ProtectionDomain;
import java.util.Arrays;
import java.util.Map;
import java.util.Properties;
import java.util.PropertyPermission;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import org.kohsuke.MetaInfServices;
import org.wildfly.common.Assert;
import org.wildfly.security.ParametricPrivilegedAction;
import org.wildfly.security.ParametricPrivilegedExceptionAction;
import org.wildfly.security.manager.action.ClearPropertyAction;
import org.wildfly.security.manager.action.GetClassLoaderAction;
import org.wildfly.security.manager.action.GetContextClassLoaderAction;
import org.wildfly.security.manager.action.GetEnvironmentAction;
import org.wildfly.security.manager.action.GetProtectionDomainAction;
import org.wildfly.security.manager.action.GetSystemPropertiesAction;
import org.wildfly.security.manager.action.ReadEnvironmentPropertyAction;
import org.wildfly.security.manager.action.ReadPropertyAction;
import org.wildfly.security.manager.action.SetContextClassLoaderAction;
import org.wildfly.security.manager.action.WritePropertyAction;
import org.wildfly.security.permission.PermissionVerifier;
import sun.reflect.Reflection;
import static java.lang.System.clearProperty;
import static java.lang.System.getProperties;
import static java.lang.System.getProperty;
import static java.lang.System.getSecurityManager;
import static java.lang.System.getenv;
import static java.lang.System.setProperty;
import static java.lang.Thread.currentThread;
import static java.security.AccessController.doPrivileged;
import static java.security.AccessController.getContext;
import static org.wildfly.security.manager.WildFlySecurityManagerPermission.doUncheckedPermission;
import static org.wildfly.security.manager._private.SecurityMessages.access;
/**
* The security manager. This security manager implementation can be switched on and off on a per-thread basis,
* and additionally logs access violations in a way that should be substantially clearer than most JDK implementations.
*
* @author <a href="mailto:[email protected]">David M. Lloyd</a>
*/
@MetaInfServices(SecurityManager.class)
public final class WildFlySecurityManager extends SecurityManager implements PermissionVerifier {
private static final Permission SECURITY_MANAGER_PERMISSION = new RuntimePermission("setSecurityManager");
private static final Permission PROPERTIES_PERMISSION = new PropertyPermission("*", "read,write");
private static final Permission ENVIRONMENT_PERMISSION = new RuntimePermission("getenv.*");
private static final Permission GET_CLASS_LOADER_PERMISSION = new RuntimePermission("getClassLoader");
private static final Permission SET_CLASS_LOADER_PERMISSION = new RuntimePermission("setClassLoader");
private static final Permission ACCESS_DECLARED_MEMBERS_PERMISSION = new RuntimePermission("accessDeclaredMembers");
private static final boolean LOG_ONLY;
static class Context {
boolean checking = true;
boolean entered = false;
ParametricPrivilegedAction<Object, Object> action1;
ParametricPrivilegedExceptionAction<Object, Object> action2;
Object parameter;
}
private static final ThreadLocal<Context> CTX = new ThreadLocal<Context>() {
protected Context initialValue() {
return new Context();
}
};
private static final Field PD_STACK;
private static final WildFlySecurityManager INSTANCE;
private static final boolean hasGetCallerClass;
private static final int callerOffset;
static {
PD_STACK = doPrivileged(new GetAccessibleDeclaredFieldAction(AccessControlContext.class, "context"));
// Cannot be lambda due to JDK race conditions
//noinspection Convert2Lambda,Anonymous2MethodRef
INSTANCE = doPrivileged(new PrivilegedAction<WildFlySecurityManager>() {
public WildFlySecurityManager run() {
return new WildFlySecurityManager();
}
});
boolean result = false;
int offset = 0;
try {
//noinspection deprecation
result = Reflection.getCallerClass(1) == WildFlySecurityManager.class || Reflection.getCallerClass(2) == WildFlySecurityManager.class;
//noinspection deprecation
offset = Reflection.getCallerClass(1) == Reflection.class ? 2 : 1;
} catch (Throwable ignored) {}
hasGetCallerClass = result;
callerOffset = offset;
LOG_ONLY = Boolean.parseBoolean(doPrivileged(new ReadPropertyAction("org.wildfly.security.manager.log-only", "false")));
}
/**
* Construct a new instance. If the caller does not have permission to do so, this method will throw an exception.
*
* @throws SecurityException if the caller does not have permission to create a security manager instance
*/
public WildFlySecurityManager() throws SecurityException {
}
@Deprecated
public static void install() throws SecurityException {
if (System.getSecurityManager() instanceof WildFlySecurityManager) return;
System.setSecurityManager(new WildFlySecurityManager());
}
@SuppressWarnings("deprecation")
static Class<?> getCallerClass(int n) {
if (hasGetCallerClass) {
return Reflection.getCallerClass(n + callerOffset);
} else {
return getCallStack()[n + callerOffset];
}
}
static Class<?>[] getCallStack() {
return INSTANCE.getClassContext();
}
/**
* Determine whether the security manager is currently checking permissions.
*
* @return {@code true} if the security manager is currently checking permissions
*/
public static boolean isChecking() {
final SecurityManager sm = getSecurityManager();
return sm instanceof WildFlySecurityManager ? doCheck() : sm != null;
}
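    // Illustrative sketch (not part of the original class): callers commonly branch on the
    // per-thread checking state to avoid an unnecessary privileged frame, e.g.
    //
    //   String dir = WildFlySecurityManager.isChecking()
    //           ? AccessController.doPrivileged((PrivilegedAction<String>) () -> System.getProperty("user.dir"))
    //           : System.getProperty("user.dir");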
/**
* Perform a permission check.
*
* @param perm the permission to check
* @throws SecurityException if the check fails
*/
public void checkPermission(final Permission perm) throws SecurityException {
checkPermission(perm, AccessController.getContext());
}
/**
* Perform a permission check.
*
* @param perm the permission to check
* @param context the security context to use for the check (must be an {@link AccessControlContext} instance)
* @throws SecurityException if the check fails
*/
public void checkPermission(final Permission perm, final Object context) throws SecurityException {
if (context instanceof AccessControlContext) {
checkPermission(perm, (AccessControlContext) context);
} else {
throw access.unknownContext();
}
}
/**
* Find the protection domain in the given list which denies a permission, or {@code null} if the permission
* check would pass.
*
* @param permission the permission to test
* @param domains the protection domains to try
* @return the first denying protection domain, or {@code null} if there is none
*/
public static ProtectionDomain findAccessDenial(final Permission permission, final ProtectionDomain... domains) {
ProtectionDomain deniedDomain = null;
if (domains != null) for (ProtectionDomain domain : domains) {
if (! domain.implies(permission)) {
final CodeSource codeSource = domain.getCodeSource();
final ClassLoader classLoader = domain.getClassLoader();
final Principal[] principals = domain.getPrincipals();
if (principals == null || principals.length == 0) {
access.accessCheckFailed(permission, codeSource, classLoader);
} else {
access.accessCheckFailed(permission, codeSource, classLoader, Arrays.toString(principals));
}
if (deniedDomain == null && ! LOG_ONLY) {
deniedDomain = domain;
}
}
}
return deniedDomain;
}
/**
* Try a permission check. Any violations will be logged to the {@code org.wildfly.security.access} category
* at a {@code DEBUG} level.
*
* @param permission the permission to check
* @param domains the protection domains to try
* @return {@code true} if the access check succeeded, {@code false} otherwise
*/
public static boolean tryCheckPermission(final Permission permission, final ProtectionDomain... domains) {
if (permission.implies(SECURITY_MANAGER_PERMISSION)) {
return false;
}
final Context ctx = CTX.get();
if (ctx.checking) {
if (ctx.entered) {
return true;
}
ctx.entered = true;
try {
final ProtectionDomain deniedDomain = findAccessDenial(permission, domains);
if (deniedDomain != null) {
return false;
}
} finally {
ctx.entered = false;
}
}
return true;
}
public boolean implies(final Permission permission) {
return tryCheckPermission(permission, getProtectionDomainStack(getContext()));
}
/**
* Perform a permission check.
*
* @param perm the permission to check
* @param context the security context to use for the check
* @throws SecurityException if the check fails
*/
public void checkPermission(final Permission perm, final AccessControlContext context) throws SecurityException {
if (perm.implies(SECURITY_MANAGER_PERMISSION)) {
throw access.secMgrChange();
}
final Context ctx = CTX.get();
if (ctx.checking) {
if (ctx.entered) {
return;
}
final ProtectionDomain[] stack;
ctx.entered = true;
try {
stack = getProtectionDomainStack(context);
if (stack != null) {
final ProtectionDomain deniedDomain = findAccessDenial(perm, stack);
if (deniedDomain != null) {
throw access.accessControlException(perm, perm, deniedDomain.getCodeSource(), deniedDomain.getClassLoader());
}
}
} finally {
ctx.entered = false;
}
}
}
private static ProtectionDomain[] getProtectionDomainStack(final AccessControlContext context) {
final ProtectionDomain[] stack;
try {
stack = (ProtectionDomain[]) PD_STACK.get(context);
} catch (IllegalAccessException e) {
// should be impossible
throw new IllegalAccessError(e.getMessage());
}
return stack;
}
private static boolean doCheck() {
return doCheck(CTX.get());
}
private static boolean doCheck(final WildFlySecurityManager.Context ctx) {
return ctx.checking && ! ctx.entered;
}
public void checkCreateClassLoader() {
if (doCheck()) {
super.checkCreateClassLoader();
}
}
public void checkAccess(final Thread t) {
if (doCheck()) {
super.checkAccess(t);
}
}
public void checkAccess(final ThreadGroup g) {
if (doCheck()) {
super.checkAccess(g);
}
}
public void checkExit(final int status) {
if (doCheck()) {
super.checkExit(status);
}
}
public void checkExec(final String cmd) {
if (doCheck()) {
super.checkExec(cmd);
}
}
public void checkLink(final String lib) {
if (doCheck()) {
super.checkLink(lib);
}
}
public void checkRead(final FileDescriptor fd) {
if (doCheck()) {
super.checkRead(fd);
}
}
public void checkRead(final String file) {
if (doCheck()) {
super.checkRead(file);
}
}
public void checkRead(final String file, final Object context) {
if (doCheck()) {
super.checkRead(file, context);
}
}
public void checkWrite(final FileDescriptor fd) {
if (doCheck()) {
super.checkWrite(fd);
}
}
public void checkWrite(final String file) {
if (doCheck()) {
super.checkWrite(file);
}
}
public void checkDelete(final String file) {
if (doCheck()) {
super.checkDelete(file);
}
}
public void checkConnect(final String host, final int port) {
if (doCheck()) {
super.checkConnect(host, port);
}
}
public void checkConnect(final String host, final int port, final Object context) {
if (doCheck()) {
super.checkConnect(host, port, context);
}
}
public void checkListen(final int port) {
if (doCheck()) {
super.checkListen(port);
}
}
public void checkAccept(final String host, final int port) {
if (doCheck()) {
super.checkAccept(host, port);
}
}
public void checkMulticast(final InetAddress maddr) {
if (doCheck()) {
super.checkMulticast(maddr);
}
}
@Deprecated @SuppressWarnings("deprecation")
public void checkMulticast(final InetAddress maddr, final byte ttl) {
if (doCheck()) {
super.checkMulticast(maddr, ttl);
}
}
public void checkPropertiesAccess() {
if (doCheck()) {
super.checkPropertiesAccess();
}
}
public void checkPropertyAccess(final String key) {
final Context ctx = CTX.get();
if (doCheck(ctx)) {
/*
* Here is our expected stack:
* 0: this method
* 1: java.lang.System.getProperty() (may repeat)
* 2: user code | java.lang.(Boolean|Integer|Long).getXxx()
* 3+: ??? | java.lang.(Boolean|Integer|Long).getXxx() (more)
* n: | user code
*/
Class<?>[] context = getClassContext();
if (context.length < 3) {
super.checkPropertyAccess(key);
return;
}
if (context[1] != System.class) {
super.checkPropertyAccess(key);
return;
}
Class<?> testClass = context[2];
if (context.length >= 4) for (int i = 2; i < context.length; i ++) {
if (context[i] == Boolean.class || context[i] == Integer.class || context[i] == Long.class || context[i] == System.class) {
testClass = context[i + 1];
} else {
break;
}
}
final ProtectionDomain protectionDomain;
final ClassLoader classLoader;
final ClassLoader objectClassLoader;
ctx.entered = true;
try {
protectionDomain = testClass.getProtectionDomain();
classLoader = testClass.getClassLoader();
objectClassLoader = Object.class.getClassLoader();
} finally {
ctx.entered = false;
}
if (classLoader == objectClassLoader) {
// can't trust it, it's gone through more JDK code
super.checkPropertyAccess(key);
return;
}
final PropertyPermission permission = new PropertyPermission(key, "read");
if (protectionDomain.implies(permission)) {
return;
}
checkPermission(permission, AccessController.getContext());
}
}
public void checkPrintJobAccess() {
if (doCheck()) {
super.checkPrintJobAccess();
}
}
public void checkPackageAccess(final String pkg) {
if (doCheck()) {
super.checkPackageAccess(pkg);
}
}
public void checkPackageDefinition(final String pkg) {
if (doCheck()) {
super.checkPackageDefinition(pkg);
}
}
public void checkSetFactory() {
if (doCheck()) {
super.checkSetFactory();
}
}
private static final Class<?>[] ATOMIC_FIELD_UPDATER_TYPES = new Class<?>[] {
AtomicReferenceFieldUpdater.class, AtomicLongFieldUpdater.class, AtomicIntegerFieldUpdater.class
};
private static boolean isAssignableToOneOf(Class<?> test, Class<?>... expect) {
for (Class<?> clazz : expect) {
if (clazz.isAssignableFrom(test)) return true;
}
return false;
}
@Deprecated @SuppressWarnings("deprecation")
public void checkMemberAccess(final Class<?> clazz, final int which) {
final Context ctx = CTX.get();
if (doCheck(ctx)) {
Assert.checkNotNullParam("class", clazz);
if (which != Member.PUBLIC) {
/* The default sec mgr implementation makes some ugly assumptions about call stack depth that we must
* unfortunately replicate (and improve upon). Here are the stack elements we expect to see:
*
* 0: this method
* 1: java.lang.Class#checkMemberAccess()
* 2: java.lang.Class#getDeclared*() or similar in Class
* 3: user code | java.util.concurrent.Atomic*FieldUpdater (impl)
* 4+: ??? | java.util.concurrent.Atomic*FieldUpdater (possibly more)
* n: ??? | user code
*
* The great irony is that Class is supposed to detect that this method is overridden and fall back to
* a simple permission check, however that doesn't seem to be working in practice.
*/
Class<?>[] context = getClassContext();
int depth = context.length;
if (depth >= 4 && context[1] == Class.class && context[2] == Class.class) {
final ClassLoader objectClassLoader;
final ClassLoader clazzClassLoader;
ClassLoader classLoader;
// get class loaders without permission check
ctx.entered = true;
try {
objectClassLoader = Object.class.getClassLoader();
clazzClassLoader = clazz.getClassLoader();
for (int i = 3; i < depth; i ++) {
classLoader = context[i].getClassLoader();
if (classLoader == objectClassLoader) {
if (isAssignableToOneOf(context[i], ATOMIC_FIELD_UPDATER_TYPES)) {
// keep going
} else {
// unknown JDK class, fall back
checkPermission(ACCESS_DECLARED_MEMBERS_PERMISSION);
return;
}
} else {
if (clazzClassLoader == classLoader) {
// permission granted
return;
} else {
// class loaders differ
checkPermission(ACCESS_DECLARED_MEMBERS_PERMISSION);
return;
}
}
}
} finally {
ctx.entered = false;
}
}
// fall back to paranoid check
checkPermission(ACCESS_DECLARED_MEMBERS_PERMISSION);
}
}
}
public void checkSecurityAccess(final String target) {
if (doCheck()) {
super.checkSecurityAccess(target);
}
}
/**
* Perform an action with permission checking enabled. If permission checking is already enabled, the action is
* simply run.
*
* @param action the action to perform
* @param <T> the action return type
* @return the return value of the action
*/
public static <T> T doChecked(PrivilegedAction<T> action) {
final Context ctx = CTX.get();
if (ctx.checking) {
return action.run();
}
ctx.checking = true;
try {
return action.run();
} finally {
ctx.checking = false;
}
}
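    // Usage sketch (illustrative): run a callback with permission checking forced on for the
    // current thread, regardless of whether it was previously disabled.
    //
    //   String home = WildFlySecurityManager.doChecked(
    //           (PrivilegedAction<String>) () -> System.getProperty("java.home"));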
/**
* Perform an action with permission checking enabled. If permission checking is already enabled, the action is
* simply run.
*
* @param action the action to perform
* @param <T> the action return type
* @return the return value of the action
* @throws PrivilegedActionException if the action threw an exception
*/
public static <T> T doChecked(PrivilegedExceptionAction<T> action) throws PrivilegedActionException {
final Context ctx = CTX.get();
if (ctx.checking) {
try {
return action.run();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new PrivilegedActionException(e);
}
}
ctx.checking = true;
try {
return action.run();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new PrivilegedActionException(e);
} finally {
ctx.checking = false;
}
}
/**
* Perform an action with permission checking enabled. If permission checking is already enabled, the action is
* simply run.
*
* @param action the action to perform
* @param context the access control context to use
* @param <T> the action return type
* @return the return value of the action
*/
public static <T> T doChecked(PrivilegedAction<T> action, AccessControlContext context) {
final Context ctx = CTX.get();
if (ctx.checking) {
return action.run();
}
ctx.checking = true;
try {
return AccessController.doPrivileged(action, context);
} finally {
ctx.checking = false;
}
}
/**
* Perform an action with permission checking enabled. If permission checking is already enabled, the action is
* simply run.
*
* @param action the action to perform
* @param context the access control context to use
* @param <T> the action return type
* @return the return value of the action
* @throws PrivilegedActionException if the action threw an exception
*/
public static <T> T doChecked(PrivilegedExceptionAction<T> action, AccessControlContext context) throws PrivilegedActionException {
final Context ctx = CTX.get();
if (ctx.checking) {
try {
return action.run();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new PrivilegedActionException(e);
}
}
ctx.checking = true;
try {
return AccessController.doPrivileged(action, context);
} finally {
ctx.checking = false;
}
}
/**
* Perform an action with permission checking enabled. If permission checking is already enabled, the action is
* simply run.
*
* @param parameter the parameter to pass to the action
* @param action the action to perform
* @param <T> the action return type
* @param <P> the action parameter type
* @return the return value of the action
*/
public static <T, P> T doChecked(P parameter, ParametricPrivilegedAction<T, P> action) {
final Context ctx = CTX.get();
if (ctx.checking) {
return action.run(parameter);
}
ctx.checking = true;
try {
return action.run(parameter);
} finally {
ctx.checking = false;
}
}
/**
* Perform an action with permission checking enabled. If permission checking is already enabled, the action is
* simply run.
*
* @param parameter the parameter to pass to the action
* @param action the action to perform
* @param <T> the action return type
* @param <P> the action parameter type
* @return the return value of the action
* @throws PrivilegedActionException if the action threw an exception
*/
public static <T, P> T doChecked(P parameter, ParametricPrivilegedExceptionAction<T, P> action) throws PrivilegedActionException {
final Context ctx = CTX.get();
if (ctx.checking) {
try {
return action.run(parameter);
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new PrivilegedActionException(e);
}
}
ctx.checking = true;
try {
return action.run(parameter);
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new PrivilegedActionException(e);
} finally {
ctx.checking = false;
}
}
/**
* Perform an action with permission checking enabled. If permission checking is already enabled, the action is
* simply run.
*
* @param parameter the parameter to pass to the action
* @param action the action to perform
* @param context the access control context to use
* @param <T> the action return type
* @param <P> the action parameter type
* @return the return value of the action
*/
public static <T, P> T doChecked(P parameter, ParametricPrivilegedAction<T, P> action, AccessControlContext context) {
final Context ctx = CTX.get();
if (ctx.checking) {
return action.run(parameter);
}
ctx.checking = true;
try {
return doPrivilegedWithParameter(parameter, action, context);
} finally {
ctx.checking = false;
}
}
/**
* Perform an action with permission checking enabled. If permission checking is already enabled, the action is
* simply run.
*
* @param parameter the parameter to pass to the action
* @param action the action to perform
* @param context the access control context to use
* @param <T> the action return type
* @param <P> the action parameter type
* @return the return value of the action
* @throws PrivilegedActionException if the action threw an exception
*/
public static <T, P> T doChecked(P parameter, ParametricPrivilegedExceptionAction<T, P> action, AccessControlContext context) throws PrivilegedActionException {
final Context ctx = CTX.get();
if (ctx.checking) {
try {
return action.run(parameter);
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new PrivilegedActionException(e);
}
}
ctx.checking = true;
try {
return doPrivilegedWithParameter(parameter, action, context);
} finally {
ctx.checking = false;
}
}
/**
* Perform an action with permission checking disabled. If permission checking is already disabled, the action is
* simply run. The immediate caller must have the {@code doUnchecked} runtime permission.
*
* @param action the action to perform
* @param <T> the action return type
* @return the return value of the action
*/
public static <T> T doUnchecked(PrivilegedAction<T> action) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return action.run();
}
ctx.checking = false;
try {
final SecurityManager sm = getSecurityManager();
if (sm != null) {
checkPDPermission(getCallerClass(2), doUncheckedPermission);
}
return action.run();
} finally {
ctx.checking = true;
}
}
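    // Usage sketch (illustrative): the immediate caller must hold the "doUnchecked" runtime
    // permission; the action itself then runs with per-thread checking switched off.
    //
    //   Properties props = WildFlySecurityManager.doUnchecked(
    //           (PrivilegedAction<Properties>) System::getProperties);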
/**
* Perform an action with permission checking disabled. If permission checking is already disabled, the action is
* simply run. The caller must have the {@code doUnchecked} runtime permission.
*
* @param action the action to perform
* @param <T> the action return type
* @return the return value of the action
* @throws PrivilegedActionException if the action threw an exception
*/
public static <T> T doUnchecked(PrivilegedExceptionAction<T> action) throws PrivilegedActionException {
final Context ctx = CTX.get();
if (! ctx.checking) {
try {
return action.run();
} catch (Exception e) {
throw new PrivilegedActionException(e);
}
}
ctx.checking = false;
try {
final SecurityManager sm = getSecurityManager();
if (sm != null) {
checkPDPermission(getCallerClass(2), doUncheckedPermission);
}
return action.run();
} catch (Exception e) {
throw new PrivilegedActionException(e);
} finally {
ctx.checking = true;
}
}
/**
* Perform an action with permission checking disabled. If permission checking is already disabled, the action is
* simply run. The immediate caller must have the {@code doUnchecked} runtime permission.
*
* @param action the action to perform
* @param context the access control context to use
* @param <T> the action return type
* @return the return value of the action
*/
public static <T> T doUnchecked(PrivilegedAction<T> action, AccessControlContext context) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return AccessController.doPrivileged(action, context);
}
ctx.checking = false;
try {
final SecurityManager sm = getSecurityManager();
if (sm != null) {
checkPDPermission(getCallerClass(2), doUncheckedPermission);
}
return AccessController.doPrivileged(action, context);
} finally {
ctx.checking = true;
}
}
/**
* Perform an action with permission checking disabled. If permission checking is already disabled, the action is
* simply run. The caller must have the {@code doUnchecked} runtime permission.
*
* @param action the action to perform
* @param context the access control context to use
* @param <T> the action return type
* @return the return value of the action
* @throws PrivilegedActionException if the action threw an exception
*/
public static <T> T doUnchecked(PrivilegedExceptionAction<T> action, AccessControlContext context) throws PrivilegedActionException {
final Context ctx = CTX.get();
if (! ctx.checking) {
return AccessController.doPrivileged(action, context);
}
ctx.checking = false;
try {
final SecurityManager sm = getSecurityManager();
if (sm != null) {
checkPDPermission(getCallerClass(2), doUncheckedPermission);
}
return AccessController.doPrivileged(action, context);
} finally {
ctx.checking = true;
}
}
/**
* Perform an action with permission checking disabled. If permission checking is already disabled, the action is
* simply run. The immediate caller must have the {@code doUnchecked} runtime permission.
*
* @param parameter the parameter to pass to the action
* @param action the action to perform
* @param <T> the action return type
* @param <P> the action parameter type
* @return the return value of the action
*/
public static <T, P> T doUnchecked(P parameter, ParametricPrivilegedAction<T, P> action) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return action.run(parameter);
}
ctx.checking = false;
try {
final SecurityManager sm = getSecurityManager();
if (sm != null) {
checkPDPermission(getCallerClass(2), doUncheckedPermission);
}
return action.run(parameter);
} finally {
ctx.checking = true;
}
}
/**
* Perform an action with permission checking disabled. If permission checking is already disabled, the action is
* simply run. The caller must have the {@code doUnchecked} runtime permission.
*
* @param parameter the parameter to pass to the action
* @param action the action to perform
* @param <T> the action return type
* @param <P> the action parameter type
* @return the return value of the action
* @throws PrivilegedActionException if the action threw an exception
*/
public static <T, P> T doUnchecked(P parameter, ParametricPrivilegedExceptionAction<T, P> action) throws PrivilegedActionException {
final Context ctx = CTX.get();
if (! ctx.checking) {
try {
return action.run(parameter);
} catch (Exception e) {
throw new PrivilegedActionException(e);
}
}
ctx.checking = false;
try {
final SecurityManager sm = getSecurityManager();
if (sm != null) {
checkPDPermission(getCallerClass(2), doUncheckedPermission);
}
return action.run(parameter);
} catch (Exception e) {
throw new PrivilegedActionException(e);
} finally {
ctx.checking = true;
}
}
/**
* Perform an action with permission checking disabled. If permission checking is already disabled, the action is
* simply run. The immediate caller must have the {@code doUnchecked} runtime permission.
*
* @param parameter the parameter to pass to the action
* @param action the action to perform
* @param context the access control context to use
* @param <T> the action return type
* @param <P> the action parameter type
* @return the return value of the action
*/
public static <T, P> T doUnchecked(P parameter, ParametricPrivilegedAction<T, P> action, AccessControlContext context) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return doPrivilegedWithParameter(parameter, action, context);
}
ctx.checking = false;
try {
final SecurityManager sm = getSecurityManager();
if (sm != null) {
checkPDPermission(getCallerClass(2), doUncheckedPermission);
}
return doPrivilegedWithParameter(parameter, action, context);
} finally {
ctx.checking = true;
}
}
/**
* Perform an action with permission checking disabled. If permission checking is already disabled, the action is
* simply run. The caller must have the {@code doUnchecked} runtime permission.
*
* @param parameter the parameter to pass to the action
* @param action the action to perform
* @param context the access control context to use
* @param <T> the action return type
* @param <P> the action parameter type
* @return the return value of the action
* @throws PrivilegedActionException if the action threw an exception
*/
public static <T, P> T doUnchecked(P parameter, ParametricPrivilegedExceptionAction<T, P> action, AccessControlContext context) throws PrivilegedActionException {
final Context ctx = CTX.get();
if (! ctx.checking) {
return doPrivilegedWithParameter(parameter, action, context);
}
ctx.checking = false;
try {
final SecurityManager sm = getSecurityManager();
if (sm != null) {
checkPDPermission(getCallerClass(2), doUncheckedPermission);
}
return doPrivilegedWithParameter(parameter, action, context);
} finally {
ctx.checking = true;
}
}
private static void checkPropertyReadPermission(Class<?> clazz, String propertyName) {
final ProtectionDomain protectionDomain;
final ClassLoader classLoader;
if (getSecurityManager() instanceof WildFlySecurityManager) {
protectionDomain = clazz.getProtectionDomain();
classLoader = clazz.getClassLoader();
} else {
protectionDomain = doPrivileged(new GetProtectionDomainAction(clazz));
classLoader = doPrivileged(new GetClassLoaderAction(clazz));
}
if (protectionDomain.implies(PROPERTIES_PERMISSION)) {
return;
}
final PropertyPermission permission = new PropertyPermission(propertyName, "read");
if (protectionDomain.implies(permission)) {
return;
}
access.accessCheckFailed(permission, protectionDomain.getCodeSource(), classLoader);
if (! LOG_ONLY) {
throw access.accessControlException(permission, permission, protectionDomain.getCodeSource(), classLoader);
}
}
private static void checkEnvPropertyReadPermission(Class<?> clazz, String propertyName) {
final ProtectionDomain protectionDomain;
final ClassLoader classLoader;
if (getSecurityManager() instanceof WildFlySecurityManager) {
protectionDomain = clazz.getProtectionDomain();
classLoader = clazz.getClassLoader();
} else {
protectionDomain = doPrivileged(new GetProtectionDomainAction(clazz));
classLoader = doPrivileged(new GetClassLoaderAction(clazz));
}
if (protectionDomain.implies(ENVIRONMENT_PERMISSION)) {
return;
}
final RuntimePermission permission = new RuntimePermission("getenv." + propertyName);
if (protectionDomain.implies(permission)) {
return;
}
access.accessCheckFailed(permission, protectionDomain.getCodeSource(), classLoader);
if (! LOG_ONLY) {
throw access.accessControlException(permission, permission, protectionDomain.getCodeSource(), classLoader);
}
}
private static void checkPropertyWritePermission(Class<?> clazz, String propertyName) {
final ProtectionDomain protectionDomain;
final ClassLoader classLoader;
if (getSecurityManager() instanceof WildFlySecurityManager) {
protectionDomain = clazz.getProtectionDomain();
classLoader = clazz.getClassLoader();
} else {
protectionDomain = doPrivileged(new GetProtectionDomainAction(clazz));
classLoader = doPrivileged(new GetClassLoaderAction(clazz));
}
if (protectionDomain.implies(PROPERTIES_PERMISSION)) {
return;
}
final PropertyPermission permission = new PropertyPermission(propertyName, "write");
if (protectionDomain.implies(permission)) {
return;
}
access.accessCheckFailed(permission, protectionDomain.getCodeSource(), classLoader);
if (! LOG_ONLY) {
throw access.accessControlException(permission, permission, protectionDomain.getCodeSource(), classLoader);
}
}
private static void checkPDPermission(Class<?> clazz, Permission permission) {
final ProtectionDomain protectionDomain;
final ClassLoader classLoader;
if (getSecurityManager() instanceof WildFlySecurityManager) {
protectionDomain = clazz.getProtectionDomain();
classLoader = clazz.getClassLoader();
} else {
protectionDomain = doPrivileged(new GetProtectionDomainAction(clazz));
classLoader = doPrivileged(new GetClassLoaderAction(clazz));
}
if (protectionDomain.implies(permission)) {
return;
}
access.accessCheckFailed(permission, protectionDomain.getCodeSource(), classLoader);
if (! LOG_ONLY) {
throw access.accessControlException(permission, permission, protectionDomain.getCodeSource(), classLoader);
}
}
/**
* Get a property, doing a faster permission check that skips having to execute a privileged action frame.
*
* @param name the property name
* @param def the default value if the property is not found
* @return the property value, or the default value
*/
public static String getPropertyPrivileged(String name, String def) {
final SecurityManager sm = getSecurityManager();
if (sm == null) {
return getProperty(name, def);
}
if (sm instanceof WildFlySecurityManager) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return getProperty(name, def);
}
ctx.checking = false;
try {
checkPropertyReadPermission(getCallerClass(2), name);
return getProperty(name, def);
} finally {
ctx.checking = true;
}
} else {
checkPropertyReadPermission(getCallerClass(2), name);
return doPrivileged(new ReadPropertyAction(name, def));
}
}
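    // Usage sketch (illustrative): read a system property through the fast-path check instead
    // of wrapping the call site in an explicit AccessController.doPrivileged frame.
    //
    //   String tmpDir = WildFlySecurityManager.getPropertyPrivileged("java.io.tmpdir", "/tmp");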
private static <T> T def(T test, T def) {
return test == null ? def : test;
}
/**
* Get an environmental property, doing a faster permission check that skips having to execute a privileged action frame.
*
* @param name the property name
* @param def the default value if the property is not found
* @return the property value, or the default value
*/
public static String getEnvPropertyPrivileged(String name, String def) {
final SecurityManager sm = getSecurityManager();
if (sm == null) {
return getenv(name);
}
if (sm instanceof WildFlySecurityManager) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return def(getenv(name), def);
}
ctx.checking = false;
try {
checkEnvPropertyReadPermission(getCallerClass(2), name);
return def(getenv(name), def);
} finally {
ctx.checking = true;
}
} else {
checkEnvPropertyReadPermission(getCallerClass(2), name);
return doPrivileged(new ReadEnvironmentPropertyAction(name, def));
}
}
/**
* Set a property, doing a faster permission check that skips having to execute a privileged action frame.
*
* @param name the property name
     * @param value the value to set
* @return the previous property value, or {@code null} if there was none
*/
public static String setPropertyPrivileged(String name, String value) {
final SecurityManager sm = getSecurityManager();
if (sm == null) {
return setProperty(name, value);
}
if (sm instanceof WildFlySecurityManager) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return setProperty(name, value);
}
ctx.checking = false;
try {
checkPropertyWritePermission(getCallerClass(2), name);
return setProperty(name, value);
} finally {
ctx.checking = true;
}
} else {
checkPropertyWritePermission(getCallerClass(2), name);
return doPrivileged(new WritePropertyAction(name, value));
}
}
/**
* Clear a property, doing a faster permission check that skips having to execute a privileged action frame.
*
* @param name the property name
* @return the previous property value, or {@code null} if there was none
*/
public static String clearPropertyPrivileged(String name) {
final SecurityManager sm = getSecurityManager();
if (sm == null) {
return clearProperty(name);
}
if (sm instanceof WildFlySecurityManager) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return clearProperty(name);
}
ctx.checking = false;
try {
checkPropertyWritePermission(getCallerClass(2), name);
return clearProperty(name);
} finally {
ctx.checking = true;
}
} else {
checkPropertyWritePermission(getCallerClass(2), name);
return doPrivileged(new ClearPropertyAction(name));
}
}
/**
* Get the current thread's context class loader, doing a faster permission check that skips having to execute a
* privileged action frame.
*
* @return the context class loader
*/
public static ClassLoader getCurrentContextClassLoaderPrivileged() {
final SecurityManager sm = System.getSecurityManager();
if (sm == null) {
return currentThread().getContextClassLoader();
}
if (sm instanceof WildFlySecurityManager) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return currentThread().getContextClassLoader();
}
ctx.checking = false;
try {
checkPDPermission(getCallerClass(2), GET_CLASS_LOADER_PERMISSION);
return currentThread().getContextClassLoader();
} finally {
ctx.checking = true;
}
} else {
checkPDPermission(getCallerClass(2), GET_CLASS_LOADER_PERMISSION);
return doPrivileged(GetContextClassLoaderAction.getInstance());
}
}
/**
* Set the current thread's context class loader, doing a faster permission check that skips having to execute a
* privileged action frame.
*
* @param newClassLoader the new class loader to set
* @return the previously set context class loader
*/
public static ClassLoader setCurrentContextClassLoaderPrivileged(ClassLoader newClassLoader) {
final SecurityManager sm = System.getSecurityManager();
final Thread thread = currentThread();
if (sm == null) try {
return thread.getContextClassLoader();
} finally {
thread.setContextClassLoader(newClassLoader);
}
if (sm instanceof WildFlySecurityManager) {
final Context ctx = CTX.get();
if (! ctx.checking) try {
return thread.getContextClassLoader();
} finally {
thread.setContextClassLoader(newClassLoader);
}
ctx.checking = false;
// separate try/finally to guarantee proper exception flow
try {
checkPDPermission(getCallerClass(2), SET_CLASS_LOADER_PERMISSION);
try {
return thread.getContextClassLoader();
} finally {
thread.setContextClassLoader(newClassLoader);
}
} finally {
ctx.checking = true;
}
} else {
checkPDPermission(getCallerClass(2), SET_CLASS_LOADER_PERMISSION);
return doPrivileged(new SetContextClassLoaderAction(newClassLoader));
}
}
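    // --- Illustrative usage sketch (added for clarity, not part of the original class).
    // It shows the usual swap-and-restore pattern around the method above; the task
    // class loader and the work runnable are hypothetical placeholders.
    private static void exampleTcclSwapSketch(ClassLoader taskLoader, Runnable work) {
        final ClassLoader old = setCurrentContextClassLoaderPrivileged(taskLoader);
        try {
            work.run();
        } finally {
            setCurrentContextClassLoaderPrivileged(old);
        }
    }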
/**
* Set the current thread's context class loader, doing a faster permission check that skips having to execute a
* privileged action frame.
*
* @param clazz the class whose class loader is the new class loader to set
* @return the previously set context class loader
*/
public static ClassLoader setCurrentContextClassLoaderPrivileged(final Class<?> clazz) {
final SecurityManager sm = System.getSecurityManager();
final Thread thread = currentThread();
if (sm == null) try {
return thread.getContextClassLoader();
} finally {
thread.setContextClassLoader(clazz.getClassLoader());
}
if (sm instanceof WildFlySecurityManager) {
final Context ctx = CTX.get();
if (! ctx.checking) try {
return thread.getContextClassLoader();
} finally {
thread.setContextClassLoader(clazz.getClassLoader());
}
ctx.checking = false;
// separate try/finally to guarantee proper exception flow
try {
final Class<?> caller = getCallerClass(2);
checkPDPermission(caller, SET_CLASS_LOADER_PERMISSION);
checkPDPermission(caller, GET_CLASS_LOADER_PERMISSION);
try {
return thread.getContextClassLoader();
} finally {
thread.setContextClassLoader(clazz.getClassLoader());
}
} finally {
ctx.checking = true;
}
} else {
final Class<?> caller = getCallerClass(2);
checkPDPermission(caller, SET_CLASS_LOADER_PERMISSION);
checkPDPermission(caller, GET_CLASS_LOADER_PERMISSION);
return doPrivileged(new SetContextClassLoaderAction(clazz.getClassLoader()));
}
}
/**
* Get the system properties map, doing a faster permission check that skips having to execute a privileged action
* frame.
*
* @return the system property map
*/
public static Properties getSystemPropertiesPrivileged() {
final SecurityManager sm = System.getSecurityManager();
if (sm == null) {
return getProperties();
}
if (sm instanceof WildFlySecurityManager) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return getProperties();
}
ctx.checking = false;
try {
checkPDPermission(getCallerClass(2), PROPERTIES_PERMISSION);
return getProperties();
} finally {
ctx.checking = true;
}
} else {
checkPDPermission(getCallerClass(2), PROPERTIES_PERMISSION);
return doPrivileged(GetSystemPropertiesAction.getInstance());
}
}
/**
* Get the system environment map, doing a faster permission check that skips having to execute a privileged action
* frame.
*
* @return the system environment map
*/
public static Map<String, String> getSystemEnvironmentPrivileged() {
final SecurityManager sm = System.getSecurityManager();
if (sm == null) {
return getenv();
}
if (sm instanceof WildFlySecurityManager) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return getenv();
}
ctx.checking = false;
try {
checkPDPermission(getCallerClass(2), ENVIRONMENT_PERMISSION);
return getenv();
} finally {
ctx.checking = true;
}
} else {
checkPDPermission(getCallerClass(2), ENVIRONMENT_PERMISSION);
return doPrivileged(GetEnvironmentAction.getInstance());
}
}
/**
* Get the class loader for a class, doing a faster permission check that skips having to execute a privileged action
* frame.
*
* @param clazz the class to check
* @return the class loader
*/
public static ClassLoader getClassLoaderPrivileged(Class<?> clazz) {
final SecurityManager sm = System.getSecurityManager();
if (sm == null) {
return clazz.getClassLoader();
}
if (sm instanceof WildFlySecurityManager) {
final Context ctx = CTX.get();
if (! ctx.checking) {
return clazz.getClassLoader();
}
ctx.checking = false;
try {
checkPDPermission(getCallerClass(2), GET_CLASS_LOADER_PERMISSION);
return clazz.getClassLoader();
} finally {
ctx.checking = true;
}
} else {
checkPDPermission(getCallerClass(2), GET_CLASS_LOADER_PERMISSION);
return doPrivileged(new GetClassLoaderAction(clazz));
}
}
private static final ClassValue<AccessControlContext> ACC_CACHE = new ClassValue<AccessControlContext>() {
protected AccessControlContext computeValue(final Class<?> type) {
final Context ctx = CTX.get();
assert ! ctx.entered;
ctx.entered = true;
try {
return new AccessControlContext(new ProtectionDomain[] { type.getProtectionDomain() });
} finally {
ctx.entered = false;
}
}
};
// Cannot be lambda due to JDK race conditions
@SuppressWarnings("Convert2Lambda")
private static final PrivilegedAction<Object> PA_TRAMPOLINE1 = new PrivilegedAction<Object>() {
public Object run() {
final Context ctx = CTX.get();
final ParametricPrivilegedAction<Object, Object> a = ctx.action1;
final Object p = ctx.parameter;
ctx.action1 = null;
ctx.parameter = null;
return a.run(p);
}
};
// Cannot be lambda due to JDK race conditions
@SuppressWarnings("Convert2Lambda")
private static final PrivilegedExceptionAction<Object> PA_TRAMPOLINE2 = new PrivilegedExceptionAction<Object>() {
public Object run() throws Exception {
final Context ctx = CTX.get();
final ParametricPrivilegedExceptionAction<Object, Object> a = ctx.action2;
final Object p = ctx.parameter;
ctx.action2 = null;
ctx.parameter = null;
return a.run(p);
}
};
/**
* Execute a parametric privileged action with the given parameter in a privileged context.
*
* @param parameter the parameter to send in to the action
* @param action the action to execute
* @param <T> the action result type
* @param <P> the parameter type
* @return the action result
*/
@SuppressWarnings("unchecked")
public static <T, P> T doPrivilegedWithParameter(P parameter, ParametricPrivilegedAction<T, P> action) {
final Context ctx = CTX.get();
ctx.action1 = (ParametricPrivilegedAction<Object, Object>) action;
ctx.parameter = parameter;
return (T) doPrivileged(PA_TRAMPOLINE1, ACC_CACHE.get(getCallerClass(2)));
}
/**
* Execute a parametric privileged action with the given parameter in a privileged context.
*
* @param parameter the parameter to send in to the action
* @param action the action to execute
* @param <T> the action result type
* @param <P> the parameter type
* @return the action result
*/
@SuppressWarnings("unchecked")
public static <T, P> T doPrivilegedWithParameter(P parameter, ParametricPrivilegedExceptionAction<T, P> action) throws PrivilegedActionException {
final Context ctx = CTX.get();
ctx.action2 = (ParametricPrivilegedExceptionAction<Object, Object>) action;
ctx.parameter = parameter;
return (T) doPrivileged(PA_TRAMPOLINE2, ACC_CACHE.get(getCallerClass(2)));
}
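    // --- Illustrative usage sketch (added for clarity, not part of the original class).
    // It shows how state can be passed into the action through the parameter instead of
    // being captured, so a single action instance can be reused; the constant below and
    // the property name argument are hypothetical.
    private static final ParametricPrivilegedAction<String, String> EXAMPLE_READ_ACTION =
            new ParametricPrivilegedAction<String, String>() {
                public String run(final String propertyName) {
                    return getProperty(propertyName, null);
                }
            };
    private static String exampleParametricSketch(String propertyName) {
        return doPrivilegedWithParameter(propertyName, EXAMPLE_READ_ACTION);
    }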
/**
* Execute a parametric privileged action with the given parameter with the given context.
*
* @param parameter the parameter to send in to the action
* @param action the action to execute
* @param accessControlContext the context to use
* @param <T> the action result type
* @param <P> the parameter type
* @return the action result
*/
@SuppressWarnings("unchecked")
public static <T, P> T doPrivilegedWithParameter(P parameter, ParametricPrivilegedAction<T, P> action, AccessControlContext accessControlContext) {
final Context ctx = CTX.get();
ctx.action1 = (ParametricPrivilegedAction<Object, Object>) action;
ctx.parameter = parameter;
ctx.entered = true;
final AccessControlContext combined;
try {
ProtectionDomain[] protectionDomainStack = getProtectionDomainStack(accessControlContext);
if (protectionDomainStack == null || protectionDomainStack.length == 0) {
combined = ACC_CACHE.get(getCallerClass(2));
} else {
final ProtectionDomain[] finalDomains = Arrays.copyOf(protectionDomainStack, protectionDomainStack.length + 1);
finalDomains[protectionDomainStack.length] = getCallerClass(2).getProtectionDomain();
combined = new AccessControlContext(finalDomains);
}
} finally {
ctx.entered = false;
}
return (T) doPrivileged(PA_TRAMPOLINE1, combined);
}
/**
* Execute a parametric privileged action with the given parameter with the given context.
*
* @param parameter the parameter to send in to the action
* @param action the action to execute
* @param accessControlContext the context to use
* @param <T> the action result type
* @param <P> the parameter type
* @return the action result
*/
@SuppressWarnings("unchecked")
public static <T, P> T doPrivilegedWithParameter(P parameter, ParametricPrivilegedExceptionAction<T, P> action, AccessControlContext accessControlContext) throws PrivilegedActionException {
final Context ctx = CTX.get();
ctx.action2 = (ParametricPrivilegedExceptionAction<Object, Object>) action;
ctx.parameter = parameter;
ctx.entered = true;
final AccessControlContext combined;
try {
ProtectionDomain[] protectionDomainStack = getProtectionDomainStack(accessControlContext);
if (protectionDomainStack == null || protectionDomainStack.length == 0) {
combined = ACC_CACHE.get(getCallerClass(2));
} else {
final ProtectionDomain[] finalDomains = Arrays.copyOf(protectionDomainStack, protectionDomainStack.length + 1);
finalDomains[protectionDomainStack.length] = getCallerClass(2).getProtectionDomain();
combined = new AccessControlContext(finalDomains);
}
} finally {
ctx.entered = false;
}
return (T) doPrivileged(PA_TRAMPOLINE2, combined);
}
}
|
sguilhen/wildfly-elytron
|
src/main/java/org/wildfly/security/manager/WildFlySecurityManager.java
|
Java
|
apache-2.0
| 60,435 | 37.567326 | 193 | 0.605494 | false |
/*! gridster.js - v0.5.6 - 2014-09-25
* http://gridster.net/
* Copyright (c) 2014 ducksboard; Licensed MIT */
.gridster {
position:relative;
}
.gridster .gs-w{
z-index: 100;
position: absolute;
cursor: pointer;
box-shadow: 0px 0px 5px rgba(0, 0, 0, 0.3);
}
.gridster .preview-holder {
z-index: 1;
position: absolute;
background-color: #b7b7b7;
}
.gridster .player-revert {
z-index: 10!important;
}
.gridster .dragging {
z-index: 10!important;
}
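/* Illustrative usage note (added for clarity, not part of the original stylesheet):
   the selectors above assume gridster's usual markup, roughly
   <div class="gridster"><ul><li class="gs-w">...</li></ul></div>,
   where gridster.js positions each .gs-w widget absolutely inside the relatively
   positioned .gridster container. */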
|
ibmjstart/ibmjstart.github.io
|
gridplumb/css/jquery.gridster.css
|
CSS
|
apache-2.0
| 496 | 16.75 | 48 | 0.618952 | false |
// Copyright 2017 The Kubernetes Dashboard Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package service
import (
"reflect"
"testing"
"github.com/kubernetes/dashboard/src/app/backend/api"
"github.com/kubernetes/dashboard/src/app/backend/resource/common"
"github.com/kubernetes/dashboard/src/app/backend/resource/endpoint"
"github.com/kubernetes/dashboard/src/app/backend/resource/pod"
metaV1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/pkg/api/v1"
)
func TestToServiceDetail(t *testing.T) {
cases := []struct {
service *v1.Service
eventList common.EventList
podList pod.PodList
endpointList endpoint.EndpointList
expected ServiceDetail
}{
{
service: &v1.Service{},
eventList: common.EventList{},
podList: pod.PodList{},
endpointList: endpoint.EndpointList{},
expected: ServiceDetail{
TypeMeta: api.TypeMeta{Kind: api.ResourceKindService},
},
}, {
service: &v1.Service{
ObjectMeta: metaV1.ObjectMeta{
Name: "test-service", Namespace: "test-namespace",
}},
expected: ServiceDetail{
ObjectMeta: api.ObjectMeta{
Name: "test-service",
Namespace: "test-namespace",
},
TypeMeta: api.TypeMeta{Kind: api.ResourceKindService},
InternalEndpoint: common.Endpoint{Host: "test-service.test-namespace"},
},
},
}
for _, c := range cases {
actual := ToServiceDetail(c.service, c.eventList, c.podList, c.endpointList, nil)
if !reflect.DeepEqual(actual, c.expected) {
t.Errorf("ToServiceDetail(%#v) == \ngot %#v, \nexpected %#v", c.service, actual,
c.expected)
}
}
}
func TestToService(t *testing.T) {
cases := []struct {
service *v1.Service
expected Service
}{
{
service: &v1.Service{}, expected: Service{
TypeMeta: api.TypeMeta{Kind: api.ResourceKindService},
},
}, {
service: &v1.Service{
ObjectMeta: metaV1.ObjectMeta{
Name: "test-service", Namespace: "test-namespace",
}},
expected: Service{
ObjectMeta: api.ObjectMeta{
Name: "test-service",
Namespace: "test-namespace",
},
TypeMeta: api.TypeMeta{Kind: api.ResourceKindService},
InternalEndpoint: common.Endpoint{Host: "test-service.test-namespace"},
},
},
}
for _, c := range cases {
actual := ToService(c.service)
if !reflect.DeepEqual(actual, c.expected) {
t.Errorf("ToService(%#v) == \ngot %#v, \nexpected %#v", c.service, actual,
c.expected)
}
}
}
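// Illustrative sketch (added for clarity, not part of the original file): it restates the
// name/namespace mapping exercised by TestToService above; the service name and namespace
// used here are hypothetical.
func ExampleToService() {
	svc := ToService(&v1.Service{
		ObjectMeta: metaV1.ObjectMeta{Name: "demo", Namespace: "default"},
	})
	_ = svc.InternalEndpoint.Host // "demo.default"
}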
|
vlal/dashboard
|
src/app/backend/resource/service/common_test.go
|
GO
|
apache-2.0
| 2,987 | 27.721154 | 83 | 0.677938 | false |
/*
Copyright (C) 2006 by
Xuan-Hieu Phan
Email: [email protected]
[email protected]
URL: http://www.hori.ecei.tohoku.ac.jp/~hieuxuan
Graduate School of Information Sciences,
Tohoku University
*/
package fpt.qa.intent.detection.qc;
import java.io.BufferedReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import jmdn.md.method.classselection.ClassSelector;
import jmdn.struct.pair.PairIntDouble;
import jmdn.struct.pair.PairStrDouble;
public class Classification {
public Option option = null;
public Data data = null;
public Dictionary dict = null;
public FeatGen feagen = null;
public Inference inference = null;
public Model model = null;
public boolean initialized = false;
private BufferedReader finModel = null;
List intCps = null;
public Classification(String modelDir) {
option = new Option(modelDir);
option.readOptions();
// init();
}
public boolean isInitialized() {
return initialized;
}
public void init() {
try {
// open model file
finModel = option.openModelFile();
if (finModel == null) {
System.out.println("Couldn't open model file");
return;
}
data = new Data(option);
// read context predicate map
data.readCpMaps(finModel);
// read label map
data.readLbMaps(finModel);
dict = new Dictionary(option, data);
// read dictionary
dict.readDict(finModel);
feagen = new FeatGen(option, data, dict);
// read features
feagen.readFeatures(finModel);
// create an inference object
inference = new Inference();
// create a model object
model = new Model(option, data, dict, feagen, null, inference, null);
model.initInference();
// close model file
finModel.close();
} catch(IOException e) {
System.out.println("Couldn't load the model, check the model file again");
System.out.println(e.toString());
}
intCps = new ArrayList();
initialized = true;
}
public String classify(String cps) {
// cps contains a list of context predicates
String modelLabel = "";
int i;
intCps.clear();
StringTokenizer strTok = new StringTokenizer(cps, " \t\r\n");
int count = strTok.countTokens();
for (i = 0; i < count; i++) {
String cpStr = strTok.nextToken();
Integer cpInt = (Integer)data.cpStr2Int.get(cpStr);
if (cpInt != null) {
intCps.add(cpInt);
}
}
Observation obsr = new Observation(intCps);
// classify
inference.classify(obsr);
String lbStr = (String)data.lbInt2Str.get(new Integer(obsr.modelLabel));
if (lbStr != null) {
modelLabel = lbStr;
}
return modelLabel;
}
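	// --- Illustrative usage sketch (added for clarity, not part of the original class).
	// The model directory and the context-predicate string are hypothetical; the sketch
	// only shows the expected call order: construct with a model directory, call init()
	// to load the model, then pass space-separated context predicates to classify().
	public static void exampleClassifySketch() {
		Classification classifier = new Classification("models/question-types");
		classifier.init();
		if (classifier.isInitialized()) {
			String label = classifier.classify("w:what w:city w:is");
			System.out.println("predicted label: " + label);
		}
	}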
public List<String> classify(List data) {
List list = new ArrayList();
for (int i = 0; i < data.size(); i++) {
list.add(classify((String)data.get(i)));
}
return list;
}
public List<PairStrDouble> classifyMultiLabels(String cps,
int numSelected, double cumulativeThreshold, double times) {
List<PairStrDouble> labelsAndWeights = new ArrayList();
int i;
intCps.clear();
StringTokenizer strTok = new StringTokenizer(cps, " \t\r\n");
int count = strTok.countTokens();
for (i = 0; i < count; i++) {
String cpStr = strTok.nextToken();
Integer cpInt = (Integer)data.cpStr2Int.get(cpStr);
if (cpInt != null) {
intCps.add(cpInt);
}
}
Observation obsr = new Observation(intCps);
List<PairIntDouble> probs = inference.getDistribution(obsr);
ClassSelector selector = new ClassSelector(probs);
List<PairIntDouble> selectedClasses =
selector.select(numSelected, cumulativeThreshold, times);
for (i = 0; i < selectedClasses.size(); i++) {
PairIntDouble pair = selectedClasses.get(i);
String lbStr = (String)data.lbInt2Str.get(pair.first);
if (lbStr != null) {
labelsAndWeights.add(new PairStrDouble(lbStr, pair.second));
}
}
return labelsAndWeights;
}
} // end of class Classification
|
NguyenAnhDuc/fpt-qa
|
src/main/java/fpt/qa/intent/detection/qc/Classification.java
|
Java
|
apache-2.0
| 4,281 | 23.323864 | 79 | 0.628825 | false |
package com.gdn.venice.client.app.logistic.view.handlers;
import java.util.List;
import com.gwtplatform.mvp.client.UiHandlers;
import com.smartgwt.client.data.DataSource;
public interface DeliveryStatusTrackingUiHandlers extends UiHandlers {
public DataSource onShowMerchantPickUpDetail(String airwayBillId);
public List<DataSource> onShowDeliveryStatusTrackingDetail(String airwayBillId, String orderId);
public void onFetchComboBoxData();
}
|
yauritux/venice-legacy
|
Venice/Venice-Web/target/classes/com/gdn/venice/client/app/logistic/view/handlers/DeliveryStatusTrackingUiHandlers.java
|
Java
|
apache-2.0
| 462 | 36.5 | 97 | 0.822511 | false |
/*
* Copyright (C) 2015 Actor LLC. <https://actor.im>
*/
import { ReduceStore } from 'flux/utils';
import Dispatcher from '../dispatcher/ActorAppDispatcher';
import { ActionTypes } from '../constants/ActorAppConstants';
class ActivityStore extends ReduceStore {
getInitialState() {
return {
isOpen: false
};
}
reduce(state, action) {
switch (action.type) {
case ActionTypes.CALL_MODAL_OPEN:
case ActionTypes.ACTIVITY_HIDE:
return this.getInitialState();
case ActionTypes.ACTIVITY_SHOW:
return {
isOpen: true
};
default:
return state;
}
}
isOpen() {
return this.getState().isOpen;
}
}
export default new ActivityStore(Dispatcher);
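// --- Illustrative usage sketch (added for clarity, not part of the original module).
// A consuming component (hypothetical) would dispatch the show/hide actions and read the
// reduced state through isOpen():
//
//   import ActivityStore from '../stores/ActivityStore';
//   import Dispatcher from '../dispatcher/ActorAppDispatcher';
//   import { ActionTypes } from '../constants/ActorAppConstants';
//
//   Dispatcher.dispatch({ type: ActionTypes.ACTIVITY_SHOW });
//   ActivityStore.isOpen(); // => true
//   Dispatcher.dispatch({ type: ActionTypes.ACTIVITY_HIDE });
//   ActivityStore.isOpen(); // => false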
|
EaglesoftZJ/iGem_Web
|
src/stores/ActivityStore.js
|
JavaScript
|
apache-2.0
| 741 | 20.171429 | 61 | 0.634278 | false |
/*
* Copyright 2016 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.model;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.teavm.model.instructions.ArrayElementType;
import org.teavm.model.instructions.BinaryBranchingCondition;
import org.teavm.model.instructions.BinaryOperation;
import org.teavm.model.instructions.BranchingCondition;
import org.teavm.model.instructions.CastIntegerDirection;
import org.teavm.model.instructions.InstructionReader;
import org.teavm.model.instructions.IntegerSubtype;
import org.teavm.model.instructions.InvocationType;
import org.teavm.model.instructions.NumericOperandType;
import org.teavm.model.instructions.SwitchTableEntryReader;
public class Interpreter {
private ClassLoader classLoader;
private BasicBlockReader currentBlock;
private List<List<IncomingReader>> outgoings;
private Object[] variables;
private Object result;
private State state;
public Interpreter(ClassLoader classLoader) {
this.classLoader = classLoader;
}
public Object interpret(ProgramReader program, Object[] parameters) throws InterpretException {
variables = new Object[program.variableCount()];
System.arraycopy(parameters, 0, variables, 0, parameters.length);
currentBlock = program.basicBlockAt(0);
state = State.EXECUTING;
outgoings = new ArrayList<>();
for (int i = 0; i < program.basicBlockCount(); ++i) {
outgoings.add(new ArrayList<>());
}
for (int i = 0; i < program.basicBlockCount(); ++i) {
BasicBlockReader block = program.basicBlockAt(i);
for (PhiReader phi : block.readPhis()) {
for (IncomingReader incoming : phi.readIncomings()) {
outgoings.get(incoming.getSource().getIndex()).add(incoming);
}
}
}
try {
while (true) {
InstructionIterator iterator = currentBlock.iterateInstructions();
try {
while (iterator.hasNext()) {
iterator.next();
iterator.read(reader);
}
} catch (RuntimeException e) {
if (!pickExceptionHandler(e)) {
throw new InterpretException(currentBlock, e);
}
}
switch (state) {
case EXITED: {
return result;
}
case THROWN: {
Throwable ex = (Throwable) result;
throw new InterpretException(currentBlock, ex);
}
case EXECUTING:
break;
}
}
} finally {
currentBlock = null;
variables = null;
outgoings = null;
result = null;
}
}
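    // --- Illustrative usage sketch (added for clarity, not part of the original class).
    // Obtaining a ProgramReader for a method is out of scope here; the sketch only shows
    // the calling convention: parameters fill the first variable slots positionally and
    // any failure inside the interpreted program surfaces as InterpretException.
    public static Object exampleInterpretSketch(ProgramReader program, ClassLoader loader,
            Object... arguments) throws InterpretException {
        Interpreter interpreter = new Interpreter(loader);
        return interpreter.interpret(program, arguments);
    }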
private boolean pickExceptionHandler(Throwable e) {
for (TryCatchBlockReader tryCatch : currentBlock.readTryCatchBlocks()) {
Class<?> exceptionType;
try {
exceptionType = tryCatch.getExceptionType() != null
? Class.forName(tryCatch.getExceptionType(), false, classLoader)
: null;
} catch (ClassNotFoundException cnfe) {
throw new RuntimeException("Can't find exception class " + tryCatch.getExceptionType());
}
if (exceptionType == null || exceptionType.isInstance(e)) {
                currentBlock = tryCatch.getHandler();
return true;
}
}
return false;
}
private InstructionReader reader = new InstructionReader() {
@Override
public void location(TextLocation location) {
}
@Override
public void nop() {
}
@Override
public void classConstant(VariableReader receiver, ValueType cst) {
variables[receiver.getIndex()] = asJvmClass(cst);
}
@Override
public void nullConstant(VariableReader receiver) {
variables[receiver.getIndex()] = null;
}
@Override
public void integerConstant(VariableReader receiver, int cst) {
variables[receiver.getIndex()] = cst;
}
@Override
public void longConstant(VariableReader receiver, long cst) {
variables[receiver.getIndex()] = cst;
}
@Override
public void floatConstant(VariableReader receiver, float cst) {
variables[receiver.getIndex()] = cst;
}
@Override
public void doubleConstant(VariableReader receiver, double cst) {
variables[receiver.getIndex()] = cst;
}
@Override
public void stringConstant(VariableReader receiver, String cst) {
variables[receiver.getIndex()] = cst;
}
@Override
public void binary(BinaryOperation op, VariableReader receiver, VariableReader first, VariableReader second,
NumericOperandType type) {
switch (type) {
case INT: {
int a = (Integer) variables[first.getIndex()];
int b = (Integer) variables[second.getIndex()];
int result;
switch (op) {
case ADD:
result = a + b;
break;
case SUBTRACT:
result = a - b;
break;
case MULTIPLY:
result = a * b;
break;
case DIVIDE:
                            result = a / b;
break;
case MODULO:
result = a % b;
break;
case COMPARE:
result = Integer.compare(a, b);
break;
case AND:
result = a & b;
break;
case OR:
result = a | b;
break;
case XOR:
result = a ^ b;
break;
default:
throw new IllegalArgumentException("Unknown operation: " + op);
}
variables[receiver.getIndex()] = result;
break;
}
case LONG: {
long a = (Long) variables[first.getIndex()];
long b = (Long) variables[second.getIndex()];
long result;
switch (op) {
case ADD:
result = a + b;
break;
case SUBTRACT:
result = a - b;
break;
case MULTIPLY:
result = a * b;
break;
case DIVIDE:
                            result = a / b;
break;
case MODULO:
result = a % b;
break;
case COMPARE:
result = Long.compare(a, b);
break;
case AND:
result = a & b;
break;
case OR:
result = a | b;
break;
case XOR:
result = a ^ b;
break;
default:
throw new IllegalArgumentException("Unknown operation: " + op);
}
variables[receiver.getIndex()] = result;
break;
}
case FLOAT: {
float a = (Float) variables[first.getIndex()];
float b = (Float) variables[second.getIndex()];
float result;
switch (op) {
case ADD:
result = a + b;
break;
case SUBTRACT:
result = a - b;
break;
case MULTIPLY:
result = a * b;
break;
case DIVIDE:
                            result = a / b;
break;
case MODULO:
result = a % b;
break;
case COMPARE:
result = Float.compare(a, b);
break;
case AND:
case OR:
case XOR:
throw new IllegalArgumentException("Unsupported operation " + op
+ " for operands of type" + type);
default:
throw new IllegalArgumentException("Unknown operation: " + op);
}
variables[receiver.getIndex()] = result;
break;
}
case DOUBLE: {
double a = (Double) variables[first.getIndex()];
double b = (Double) variables[second.getIndex()];
double result;
switch (op) {
case ADD:
result = a + b;
break;
case SUBTRACT:
result = a - b;
break;
case MULTIPLY:
result = a * b;
break;
case DIVIDE:
                            result = a / b;
break;
case MODULO:
result = a % b;
break;
case COMPARE:
result = Double.compare(a, b);
break;
case AND:
case OR:
case XOR:
throw new IllegalArgumentException("Unsupported operation " + op
+ " for operands of type" + type);
default:
throw new IllegalArgumentException("Unknown operation: " + op);
}
variables[receiver.getIndex()] = result;
break;
}
}
}
@Override
public void negate(VariableReader receiver, VariableReader operand, NumericOperandType type) {
Object result;
Object a = variables[operand.getIndex()];
switch (type) {
case INT:
result = -(Integer) a;
break;
case LONG:
result = -(Long) a;
break;
case FLOAT:
result = -(Float) a;
break;
case DOUBLE:
result = -(Double) a;
break;
default:
throw new IllegalArgumentException("Unknown type: " + type);
}
variables[receiver.getIndex()] = result;
}
@Override
public void assign(VariableReader receiver, VariableReader assignee) {
variables[receiver.getIndex()] = variables[assignee.getIndex()];
}
@Override
public void cast(VariableReader receiver, VariableReader value, ValueType targetType) {
variables[receiver.getIndex()] = asJvmClass(targetType).cast(variables[value.getIndex()]);
}
@Override
public void cast(VariableReader receiver, VariableReader value, NumericOperandType sourceType,
NumericOperandType targetType) {
Object result;
switch (sourceType) {
case INT: {
int a = (Integer) variables[value.getIndex()];
switch (targetType) {
case INT:
result = a;
break;
case LONG:
result = (long) a;
break;
case FLOAT:
result = (float) a;
break;
case DOUBLE:
result = (double) a;
break;
default:
throw new IllegalArgumentException("Can't cast " + sourceType + " to " + targetType);
}
break;
}
case LONG: {
long a = (Long) variables[value.getIndex()];
switch (targetType) {
case INT:
result = (int) a;
break;
case LONG:
result = a;
break;
case FLOAT:
result = (float) a;
break;
case DOUBLE:
result = (double) a;
break;
default:
throw new IllegalArgumentException("Can't cast " + sourceType + " to " + targetType);
}
break;
}
case FLOAT: {
float a = (Float) variables[value.getIndex()];
switch (targetType) {
case INT:
result = (int) a;
break;
case LONG:
result = (long) a;
break;
case FLOAT:
result = a;
break;
case DOUBLE:
result = (double) a;
break;
default:
throw new IllegalArgumentException("Can't cast " + sourceType + " to " + targetType);
}
break;
}
case DOUBLE: {
double a = (Double) variables[value.getIndex()];
switch (targetType) {
case INT:
result = (int) a;
break;
case LONG:
result = (long) a;
break;
case FLOAT:
result = (float) a;
break;
case DOUBLE:
result = a;
break;
default:
throw new IllegalArgumentException("Can't cast " + sourceType + " to " + targetType);
}
break;
}
default:
throw new IllegalArgumentException("Can't cast " + sourceType + " to " + targetType);
}
variables[receiver.getIndex()] = result;
}
@Override
public void cast(VariableReader receiver, VariableReader value, IntegerSubtype type,
CastIntegerDirection direction) {
switch (direction) {
case FROM_INTEGER: {
int a = (Integer) variables[value.getIndex()];
Object result;
switch (type) {
case BYTE:
result = (byte) a;
break;
case SHORT:
result = (short) a;
break;
case CHAR:
result = (char) a;
break;
default:
throw new IllegalArgumentException("Unknown type: " + type);
}
variables[receiver.getIndex()] = result;
break;
}
case TO_INTEGER: {
Object a = variables[value.getIndex()];
int result;
switch (type) {
case BYTE:
result = (Byte) a;
break;
case SHORT:
result = (Short) a;
break;
case CHAR:
result = (Character) a;
break;
default:
throw new IllegalArgumentException("Unknown type: " + type);
}
variables[receiver.getIndex()] = result;
break;
}
}
}
@Override
public void jumpIf(BranchingCondition cond, VariableReader operand, BasicBlockReader consequent,
BasicBlockReader alternative) {
Object a = variables[operand.getIndex()];
boolean c;
switch (cond) {
case EQUAL:
c = (Integer) a == 0;
break;
case NOT_EQUAL:
c = (Integer) a != 0;
break;
case LESS:
c = (Integer) a < 0;
break;
case LESS_OR_EQUAL:
c = (Integer) a <= 0;
break;
case GREATER:
c = (Integer) a > 0;
break;
case GREATER_OR_EQUAL:
c = (Integer) a >= 0;
break;
case NULL:
c = a == null;
break;
case NOT_NULL:
c = a != null;
break;
default:
throw new IllegalArgumentException("Unknown condition: " + cond);
}
jump(c ? consequent : alternative);
}
@Override
public void jumpIf(BinaryBranchingCondition cond, VariableReader first, VariableReader second,
BasicBlockReader consequent, BasicBlockReader alternative) {
Object a = variables[first.getIndex()];
Object b = variables[second.getIndex()];
boolean c;
switch (cond) {
case EQUAL:
c = ((Integer) a).intValue() == (Integer) b;
break;
case NOT_EQUAL:
c = ((Integer) a).intValue() != (Integer) b;
break;
case REFERENCE_EQUAL:
c = a == b;
break;
case REFERENCE_NOT_EQUAL:
c = a != b;
break;
default:
throw new IllegalArgumentException("Unknown condition: " + cond);
}
jump(c ? consequent : alternative);
}
@Override
public void jump(BasicBlockReader target) {
Object[] newVariables = variables.clone();
for (IncomingReader outgoing : outgoings.get(currentBlock.getIndex())) {
if (outgoing.getPhi().getBasicBlock() != target) {
continue;
}
newVariables[outgoing.getPhi().getReceiver().getIndex()] = variables[outgoing.getValue().getIndex()];
}
variables = newVariables;
currentBlock = target;
}
@Override
public void choose(VariableReader condition, List<? extends SwitchTableEntryReader> table,
BasicBlockReader defaultTarget) {
int value = (Integer) variables[condition.getIndex()];
for (SwitchTableEntryReader entry : table) {
if (value == entry.getCondition()) {
jump(entry.getTarget());
return;
}
}
jump(defaultTarget);
}
@Override
public void exit(VariableReader valueToReturn) {
state = State.EXITED;
result = variables[valueToReturn.getIndex()];
}
@Override
public void raise(VariableReader exception) {
Throwable e = (Throwable) variables[exception.getIndex()];
if (!pickExceptionHandler(e)) {
state = State.EXITED;
result = e;
}
}
@Override
public void createArray(VariableReader receiver, ValueType itemType, VariableReader size) {
Class<?> itemJvmType = asJvmClass(itemType);
int sizeValue = (int) variables[size.getIndex()];
variables[receiver.getIndex()] = Array.newInstance(itemJvmType, sizeValue);
}
@Override
public void createArray(VariableReader receiver, ValueType itemType,
List<? extends VariableReader> dimensions) {
Class<?> itemJvmType = asJvmClass(itemType);
for (int i = 1; i < dimensions.size(); ++i) {
itemJvmType = Array.newInstance(itemJvmType, 0).getClass();
}
variables[receiver.getIndex()] = createArray(itemJvmType, dimensions, 0);
}
private Object createArray(Class<?> itemType, List<? extends VariableReader> dimensions, int dimensionIndex) {
int dimensionValue = (int) variables[dimensions.get(dimensionIndex).getIndex()];
Object result = Array.newInstance(itemType, dimensionValue);
if (dimensionIndex < dimensions.size() - 1) {
for (int i = 0; i < dimensionValue; ++i) {
Array.set(result, i, createArray(itemType.getComponentType(), dimensions, dimensionIndex + 1));
}
}
return result;
}
@Override
public void create(VariableReader receiver, String type) {
Class<?> cls;
try {
cls = Class.forName(type, false, classLoader);
} catch (ClassNotFoundException e) {
throw new RuntimeException("Class not found: " + type);
}
variables[receiver.getIndex()] = null;
}
@Override
public void getField(VariableReader receiver, VariableReader instance, FieldReference field,
ValueType fieldType) {
Field jvmField = getJvmField(field);
Object jvmInstance = instance != null ? variables[instance.getIndex()] : null;
Object result;
try {
result = jvmField.get(jvmInstance);
} catch (IllegalAccessException e) {
throw new RuntimeException("Can't get field value: " + field);
}
variables[receiver.getIndex()] = result;
}
@Override
public void putField(VariableReader instance, FieldReference field, VariableReader value, ValueType fieldType) {
Field jvmField = getJvmField(field);
Object jvmInstance = instance != null ? variables[instance.getIndex()] : null;
try {
jvmField.set(jvmInstance, variables[value.getIndex()]);
} catch (IllegalAccessException e) {
throw new RuntimeException("Can't get field value: " + field);
}
}
private Field getJvmField(FieldReference field) {
Class<?> cls;
try {
cls = Class.forName(field.getClassName(), false, classLoader);
} catch (ClassNotFoundException e) {
throw new RuntimeException("Class not found: " + field.getClassName());
}
Field jvmField;
try {
jvmField = cls.getDeclaredField(field.getFieldName());
} catch (NoSuchFieldException e) {
throw new RuntimeException("Field not found: " + field);
}
jvmField.setAccessible(true);
return jvmField;
}
@Override
public void arrayLength(VariableReader receiver, VariableReader array) {
int value = Array.getLength(variables[array.getIndex()]);
variables[receiver.getIndex()] = value;
}
@Override
public void cloneArray(VariableReader receiver, VariableReader array) {
Object jvmArray = variables[array.getIndex()];
int length = Array.getLength(jvmArray);
Object copy = Array.newInstance(jvmArray.getClass().getComponentType(), length);
for (int i = 0; i < length; ++i) {
                Array.set(copy, i, Array.get(jvmArray, i));
}
variables[receiver.getIndex()] = copy;
}
@Override
public void unwrapArray(VariableReader receiver, VariableReader array, ArrayElementType elementType) {
variables[receiver.getIndex()] = variables[array.getIndex()];
}
@Override
public void getElement(VariableReader receiver, VariableReader array, VariableReader index,
ArrayElementType type) {
Object jvmArray = variables[array.getIndex()];
int indexValue = (Integer) variables[index.getIndex()];
variables[receiver.getIndex()] = Array.get(jvmArray, indexValue);
}
@Override
public void putElement(VariableReader array, VariableReader index, VariableReader value,
ArrayElementType type) {
Object jvmArray = variables[array.getIndex()];
int indexValue = (Integer) variables[index.getIndex()];
Array.set(jvmArray, indexValue, variables[value.getIndex()]);
}
@Override
public void invoke(VariableReader receiver, VariableReader instance, MethodReference method,
List<? extends VariableReader> arguments, InvocationType type) {
Method jvmMethod = asJvmMethod(method);
Object[] jvmArgs = new Object[arguments.size()];
for (int i = 0; i < jvmArgs.length; ++i) {
jvmArgs[i] = variables[arguments.get(i).getIndex()];
}
Object jvmInstance = instance != null ? variables[instance.getIndex()] : null;
Object result;
try {
result = jvmMethod.invoke(jvmInstance, jvmArgs);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException("Error calling method " + method, e);
}
if (receiver != null) {
variables[receiver.getIndex()] = result;
}
}
private Method asJvmMethod(MethodReference method) {
Class<?> cls;
try {
cls = Class.forName(method.getClassName(), false, classLoader);
} catch (ClassNotFoundException e) {
throw new RuntimeException("Can't find class " + method.getClassName());
}
Class<?>[] jvmParameters = new Class[method.parameterCount()];
for (int i = 0; i < method.parameterCount(); ++i) {
jvmParameters[i] = asJvmClass(method.parameterType(i));
}
Class<?> jvmReturnType = asJvmClass(method.getReturnType());
for (Method jvmMethod : cls.getDeclaredMethods()) {
                if (jvmMethod.getName().equals(method.getName())
                        && Arrays.equals(jvmMethod.getParameterTypes(), jvmParameters)
                        && jvmReturnType.equals(jvmMethod.getReturnType())) {
return jvmMethod;
}
}
throw new RuntimeException("Method not found: " + method);
}
@Override
public void invokeDynamic(VariableReader receiver, VariableReader instance, MethodDescriptor method,
List<? extends VariableReader> arguments, MethodHandle bootstrapMethod,
List<RuntimeConstant> bootstrapArguments) {
throw new RuntimeException("InvokeDynamic is not supported");
}
@Override
public void isInstance(VariableReader receiver, VariableReader value, ValueType type) {
Object jvmValue = variables[value.getIndex()];
Class<?> jvmType = asJvmClass(type);
variables[receiver.getIndex()] = jvmType.isInstance(jvmValue);
}
@Override
public void initClass(String className) {
try {
Class.forName(className);
} catch (ClassNotFoundException e) {
throw new RuntimeException("Class not found: " + className);
}
}
@Override
public void nullCheck(VariableReader receiver, VariableReader value) {
Object jvmValue = variables[value.getIndex()];
if (jvmValue == null) {
throw new NullPointerException();
}
variables[receiver.getIndex()] = jvmValue;
}
@Override
public void monitorEnter(VariableReader objectRef) {
}
@Override
public void monitorExit(VariableReader objectRef) {
}
private Class<?> asJvmClass(ValueType type) {
if (type instanceof ValueType.Primitive) {
switch (((ValueType.Primitive) type).getKind()) {
case BOOLEAN:
return boolean.class;
case BYTE:
return byte.class;
case SHORT:
return short.class;
case CHARACTER:
return char.class;
case INTEGER:
return int.class;
case LONG:
return long.class;
case FLOAT:
return float.class;
case DOUBLE:
return double.class;
default:
break;
}
} else if (type instanceof ValueType.Void) {
return void.class;
} else if (type instanceof ValueType.Array) {
Class<?> itemJvmClass = asJvmClass(((ValueType.Array) type).getItemType());
return Array.newInstance(itemJvmClass, 0).getClass();
} else if (type instanceof ValueType.Object) {
try {
                    return Class.forName(((ValueType.Object) type).getClassName(), false, classLoader);
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("Class not found: " + type);
}
}
throw new IllegalArgumentException("Unknown type: " + type);
}
};
private enum State {
EXECUTING,
EXITED,
THROWN
}
}
|
jtulach/teavm
|
core/src/main/java/org/teavm/model/Interpreter.java
|
Java
|
apache-2.0
| 32,692 | 38.058542 | 120 | 0.459684 | false |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.externalSystem.util;
import com.intellij.execution.rmi.RemoteUtil;
import com.intellij.ide.highlighter.ArchiveFileType;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.*;
import com.intellij.openapi.externalSystem.ExternalSystemAutoImportAware;
import com.intellij.openapi.externalSystem.ExternalSystemManager;
import com.intellij.openapi.externalSystem.ExternalSystemModulePropertyManager;
import com.intellij.openapi.externalSystem.ExternalSystemUiAware;
import com.intellij.openapi.externalSystem.model.*;
import com.intellij.openapi.externalSystem.model.project.LibraryData;
import com.intellij.openapi.externalSystem.model.project.ModuleData;
import com.intellij.openapi.externalSystem.model.project.ProjectData;
import com.intellij.openapi.externalSystem.model.settings.ExternalSystemExecutionSettings;
import com.intellij.openapi.externalSystem.model.task.TaskData;
import com.intellij.openapi.externalSystem.service.project.ProjectDataManager;
import com.intellij.openapi.externalSystem.settings.AbstractExternalSystemLocalSettings;
import com.intellij.openapi.externalSystem.settings.AbstractExternalSystemSettings;
import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings;
import com.intellij.openapi.externalSystem.settings.ExternalSystemSettingsListener;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ExternalProjectSystemRegistry;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.ProjectModelExternalSource;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.BooleanFunction;
import com.intellij.util.NullableFunction;
import com.intellij.util.PathsList;
import com.intellij.util.SmartList;
import com.intellij.util.concurrency.EdtExecutorService;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.*;
import java.io.File;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.*;
import java.util.function.Consumer;
/**
* @author Denis Zhdanov
*/
public final class ExternalSystemApiUtil {
@NotNull public static final String PATH_SEPARATOR = "/";
@NotNull public static final Comparator<Object> ORDER_AWARE_COMPARATOR = new Comparator<>() {
@Override
public int compare(@NotNull Object o1, @NotNull Object o2) {
int order1 = getOrder(o1);
int order2 = getOrder(o2);
return Integer.compare(order1, order2);
}
private int getOrder(@NotNull Object o) {
Queue<Class<?>> toCheck = new ArrayDeque<>();
toCheck.add(o.getClass());
while (!toCheck.isEmpty()) {
Class<?> clazz = toCheck.poll();
Order annotation = clazz.getAnnotation(Order.class);
if (annotation != null) {
return annotation.value();
}
Class<?> c = clazz.getSuperclass();
if (c != null) {
toCheck.add(c);
}
Class<?>[] interfaces = clazz.getInterfaces();
Collections.addAll(toCheck, interfaces);
}
return ExternalSystemConstants.UNORDERED;
}
};
@NotNull private static final NullableFunction<DataNode<?>, Key<?>> GROUPER = node -> node.getKey();
private ExternalSystemApiUtil() {
}
@NotNull
public static String extractNameFromPath(@NotNull String path) {
String strippedPath = stripPath(path);
final int i = strippedPath.lastIndexOf(PATH_SEPARATOR);
final String result;
if (i < 0 || i >= strippedPath.length() - 1) {
result = strippedPath;
}
else {
result = strippedPath.substring(i + 1);
}
return result;
}
@NotNull
private static String stripPath(@NotNull String path) {
String[] endingsToStrip = {"/", "!", ".jar"};
StringBuilder buffer = new StringBuilder(path);
for (String ending : endingsToStrip) {
if (buffer.lastIndexOf(ending) == buffer.length() - ending.length()) {
buffer.setLength(buffer.length() - ending.length());
}
}
return buffer.toString();
}
@NotNull
public static String getLibraryName(@NotNull Library library) {
final String result = library.getName();
if (result != null) {
return result;
}
for (OrderRootType type : OrderRootType.getAllTypes()) {
for (String url : library.getUrls(type)) {
String candidate = extractNameFromPath(url);
if (!StringUtil.isEmpty(candidate)) {
return candidate;
}
}
}
assert false;
return "unknown-lib";
}
public static boolean isRelated(@NotNull Library library, @NotNull LibraryData libraryData) {
return getLibraryName(library).equals(libraryData.getInternalName());
}
public static boolean isExternalSystemLibrary(@NotNull Library library, @NotNull ProjectSystemId externalSystemId) {
return library.getName() != null && StringUtil.startsWithIgnoreCase(library.getName(), externalSystemId.getId() + ": ");
}
public static void orderAwareSort(@NotNull List<?> data) {
data.sort(ORDER_AWARE_COMPARATOR);
}
/**
* @param path target path
* @return path that points to the same location as the given one and that uses only slashes
*/
@NotNull
public static String toCanonicalPath(@NotNull String path) {
String p = normalizePath(path);
assert p != null;
return FileUtil.toCanonicalPath(p);
}
@NotNull
public static String getLocalFileSystemPath(@NotNull VirtualFile file) {
if (FileTypeRegistry.getInstance().isFileOfType(file, ArchiveFileType.INSTANCE)) {
final VirtualFile jar = JarFileSystem.getInstance().getVirtualFileForJar(file);
if (jar != null) {
return jar.getPath();
}
}
return toCanonicalPath(file.getPath());
}
@Nullable
public static ExternalSystemManager<?, ?, ?, ?, ?> getManager(@NotNull ProjectSystemId externalSystemId) {
return ExternalSystemManager.EP_NAME.findFirstSafe(manager -> externalSystemId.equals(manager.getSystemId()));
}
@NotNull
public static List<ExternalSystemManager<?, ?, ?, ?, ?>> getAllManagers() {
return ExternalSystemManager.EP_NAME.getExtensionList();
}
public static MultiMap<Key<?>, DataNode<?>> recursiveGroup(@NotNull Collection<? extends DataNode<?>> nodes) {
MultiMap<Key<?>, DataNode<?>> result = new ContainerUtil.KeyOrderedMultiMap<>();
Queue<Collection<? extends DataNode<?>>> queue = new LinkedList<>();
queue.add(nodes);
while (!queue.isEmpty()) {
Collection<? extends DataNode<?>> _nodes = queue.remove();
result.putAllValues(group(_nodes));
for (DataNode<?> _node : _nodes) {
queue.add(_node.getChildren());
}
}
return result;
}
@NotNull
public static MultiMap<Key<?>, DataNode<?>> group(@NotNull Collection<? extends DataNode<?>> nodes) {
return ContainerUtil.groupBy(nodes, GROUPER);
}
@NotNull
public static <K, V> MultiMap<DataNode<K>, DataNode<V>> groupBy(@NotNull Collection<? extends DataNode<V>> nodes, final Class<K> moduleDataClass) {
return ContainerUtil.groupBy(nodes, node -> node.getParent(moduleDataClass));
}
@NotNull
public static <K, V> MultiMap<DataNode<K>, DataNode<V>> groupBy(@NotNull Collection<? extends DataNode<V>> nodes, @NotNull final Key<K> key) {
return ContainerUtil.groupBy(nodes, node -> node.getDataNode(key));
}
@SuppressWarnings("unchecked")
@NotNull
public static <T> Collection<DataNode<T>> getChildren(@NotNull DataNode<?> node, @NotNull Key<T> key) {
Collection<DataNode<T>> result = null;
for (DataNode<?> child : node.getChildren()) {
if (!key.equals(child.getKey())) {
continue;
}
if (result == null) {
result = new ArrayList<>();
}
result.add((DataNode<T>)child);
}
return result == null ? Collections.emptyList() : result;
}
@SuppressWarnings("unchecked")
@Nullable
public static <T> DataNode<T> find(@NotNull DataNode<?> node, @NotNull Key<T> key) {
for (DataNode<?> child : node.getChildren()) {
if (key.equals(child.getKey())) {
return (DataNode<T>)child;
}
}
return null;
}
@SuppressWarnings("unchecked")
@Nullable
public static <T> DataNode<T> find(@NotNull DataNode<?> node, @NotNull Key<T> key, BooleanFunction<? super DataNode<T>> predicate) {
for (DataNode<?> child : node.getChildren()) {
if (key.equals(child.getKey()) && predicate.fun((DataNode<T>)child)) {
return (DataNode<T>)child;
}
}
return null;
}
@Nullable
public static <T> DataNode<T> findParent(@NotNull DataNode<?> node, @NotNull Key<T> key) {
return findParent(node, key, null);
}
@SuppressWarnings("unchecked")
@Nullable
public static <T> DataNode<T> findParent(@NotNull DataNode<?> node,
@NotNull Key<T> key,
@Nullable BooleanFunction<? super DataNode<T>> predicate) {
DataNode<?> parent = node.getParent();
if (parent == null) return null;
return key.equals(parent.getKey()) && (predicate == null || predicate.fun((DataNode<T>)parent))
? (DataNode<T>)parent : findParent(parent, key, predicate);
}
@NotNull
public static <T> Collection<DataNode<T>> findAll(@NotNull DataNode<?> parent, @NotNull Key<T> key) {
return getChildren(parent, key);
}
public static void visit(@Nullable DataNode<?> originalNode, @NotNull Consumer<? super DataNode<?>> consumer) {
if (originalNode != null) {
originalNode.visit(consumer);
}
}
@NotNull
public static <T> Collection<DataNode<T>> findAllRecursively(@Nullable final DataNode<?> node,
@NotNull final Key<T> key) {
if (node == null) return Collections.emptyList();
final Collection<DataNode<?>> nodes = findAllRecursively(node.getChildren(), node1 -> node1.getKey().equals(key));
//noinspection unchecked
return new SmartList(nodes);
}
@NotNull
public static Collection<DataNode<?>> findAllRecursively(@NotNull Collection<? extends DataNode<?>> nodes) {
return findAllRecursively(nodes, null);
}
@NotNull
public static Collection<DataNode<?>> findAllRecursively(@Nullable DataNode<?> node,
@Nullable BooleanFunction<? super DataNode<?>> predicate) {
if (node == null) return Collections.emptyList();
return findAllRecursively(node.getChildren(), predicate);
}
@NotNull
public static Collection<DataNode<?>> findAllRecursively(@NotNull Collection<? extends DataNode<?>> nodes,
@Nullable BooleanFunction<? super DataNode<?>> predicate) {
SmartList<DataNode<?>> result = new SmartList<>();
for (DataNode<?> node : nodes) {
if (predicate == null || predicate.fun(node)) {
result.add(node);
}
}
for (DataNode<?> node : nodes) {
result.addAll(findAllRecursively(node.getChildren(), predicate));
}
return result;
}
@Nullable
public static DataNode<?> findFirstRecursively(@NotNull DataNode<?> parentNode,
@NotNull BooleanFunction<? super DataNode<?>> predicate) {
Queue<DataNode<?>> queue = new LinkedList<>();
queue.add(parentNode);
return findInQueue(queue, predicate);
}
@Nullable
public static DataNode<?> findFirstRecursively(@NotNull Collection<? extends DataNode<?>> nodes,
@NotNull BooleanFunction<? super DataNode<?>> predicate) {
return findInQueue(new LinkedList<>(nodes), predicate);
}
@Nullable
private static DataNode<?> findInQueue(@NotNull Queue<DataNode<?>> queue,
@NotNull BooleanFunction<? super DataNode<?>> predicate) {
while (!queue.isEmpty()) {
DataNode<?> node = queue.remove();
if (predicate.fun(node)) {
return node;
}
queue.addAll(node.getChildren());
}
return null;
}
public static void executeProjectChangeAction(@NotNull final DisposeAwareProjectChange task) {
executeProjectChangeAction(true, task);
}
public static void executeProjectChangeAction(boolean synchronous, @NotNull final DisposeAwareProjectChange task) {
if (!ApplicationManager.getApplication().isDispatchThread()) {
TransactionGuard.getInstance().assertWriteSafeContext(ModalityState.defaultModalityState());
}
executeOnEdt(synchronous, () -> ApplicationManager.getApplication().runWriteAction(task));
}
public static void executeOnEdt(boolean synchronous, @NotNull Runnable task) {
final Application app = ApplicationManager.getApplication();
if (app.isDispatchThread()) {
task.run();
return;
}
if (synchronous) {
app.invokeAndWait(task);
}
else {
app.invokeLater(task);
}
}
public static <T> T executeOnEdt(@NotNull final Computable<T> task) {
final Application app = ApplicationManager.getApplication();
final Ref<T> result = Ref.create();
app.invokeAndWait(() -> result.set(task.compute()));
return result.get();
}
public static <T> T doWriteAction(@NotNull final Computable<T> task) {
return executeOnEdt(() -> ApplicationManager.getApplication().runWriteAction(task));
}
public static void doWriteAction(@NotNull final Runnable task) {
executeOnEdt(true, () -> ApplicationManager.getApplication().runWriteAction(task));
}
/**
* Adds runnable to Event Dispatch Queue
   * if we aren't in UnitTest or Headless environment mode
*
* @param runnable Runnable
*/
public static void addToInvokeLater(final Runnable runnable) {
final Application application = ApplicationManager.getApplication();
final boolean unitTestMode = application.isUnitTestMode();
if (unitTestMode) {
UIUtil.invokeLaterIfNeeded(runnable);
}
else if (application.isHeadlessEnvironment() || application.isDispatchThread()) {
runnable.run();
}
else {
EdtExecutorService.getInstance().execute(runnable);
}
}
/**
* @deprecated there is no need to call this method since we don't put message bundles to separate resources_en.jar files (IDEA-255246)
*/
@Deprecated
public static void addBundle(@NotNull PathsList classPath, @NotNull String bundlePath, @NotNull Class<?> contextClass) {
String pathToUse = bundlePath.replace('.', '/');
if (!pathToUse.endsWith(".properties")) {
pathToUse += ".properties";
}
if (!pathToUse.startsWith("/")) {
pathToUse = '/' + pathToUse;
}
String root = PathManager.getResourceRoot(contextClass, pathToUse);
if (root != null) {
classPath.add(root);
}
}
@Nullable
public static String normalizePath(@Nullable String s) {
return s == null ? null : s.replace('\\', ExternalSystemConstants.PATH_SEPARATOR);
}
/**
* Allows to answer if given ide project has 1-1 mapping with the given external project, i.e. the ide project has been
* imported from external system and no other external projects have been added.
* <p/>
   * This might be necessary in a situation when a project-level setting is changed (e.g. project name). We don't want to rename
   * the ide project if it doesn't completely correspond to the given external project.
*
* @param ideProject target ide project
* @param projectData target external project
* @param modules the list of modules to check (during import this contains uncommitted modules from the modifiable model)
* @return {@code true} if given ide project has 1-1 mapping to the given external project;
* {@code false} otherwise
*/
public static boolean isOneToOneMapping(@NotNull Project ideProject, @NotNull ProjectData projectData, Module[] modules) {
String linkedExternalProjectPath = null;
for (ExternalSystemManager<?, ?, ?, ?, ?> manager : ExternalSystemManager.EP_NAME.getIterable()) {
ProjectSystemId externalSystemId = manager.getSystemId();
AbstractExternalSystemSettings systemSettings = getSettings(ideProject, externalSystemId);
Collection projectsSettings = systemSettings.getLinkedProjectsSettings();
int linkedProjectsNumber = projectsSettings.size();
if (linkedProjectsNumber > 1) {
// More than one external project of the same external system type is linked to the given ide project.
return false;
}
else if (linkedProjectsNumber == 1) {
if (linkedExternalProjectPath == null) {
// More than one external project of different external system types is linked to the current ide project.
linkedExternalProjectPath = ((ExternalProjectSettings)projectsSettings.iterator().next()).getExternalProjectPath();
}
else {
return false;
}
}
}
if (linkedExternalProjectPath != null && !linkedExternalProjectPath.equals(projectData.getLinkedExternalProjectPath())) {
// New external project is being linked.
return false;
}
for (Module module : modules) {
if (!isExternalSystemAwareModule(projectData.getOwner(), module)) {
return false;
}
}
return true;
}
@NotNull
public static @NlsSafe String getProjectRepresentationName(@NotNull String targetProjectPath, @Nullable String rootProjectPath) {
if (rootProjectPath == null) {
File rootProjectDir = new File(targetProjectPath);
if (rootProjectDir.isFile()) {
rootProjectDir = rootProjectDir.getParentFile();
}
return rootProjectDir.getName();
}
File rootProjectDir = new File(rootProjectPath);
if (rootProjectDir.isFile()) {
rootProjectDir = rootProjectDir.getParentFile();
}
File targetProjectDir = new File(targetProjectPath);
if (targetProjectDir.isFile()) {
targetProjectDir = targetProjectDir.getParentFile();
}
StringBuilder buffer = new StringBuilder();
for (File f = targetProjectDir; f != null && !FileUtil.filesEqual(f, rootProjectDir); f = f.getParentFile()) {
buffer.insert(0, f.getName()).insert(0, ":");
}
buffer.insert(0, rootProjectDir.getName());
return buffer.toString();
}
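  // Worked example (hypothetical paths, not from the original docs): with rootProjectPath = "/work/root"
  // and targetProjectPath = "/work/root/core/util", the loop above walks up from "util" to "core" and the
  // method returns "root:core:util"; with rootProjectPath == null it simply returns the directory name
  // of targetProjectPath ("util").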
/**
* There is a possible case that external project linked to an ide project is a multi-project, i.e. contains more than one
* module.
* <p/>
* This method tries to find root project's config path assuming that given path points to a sub-project's config path.
*
* @param externalProjectPath external sub-project's config path
* @param externalSystemId target external system
* @param project target ide project
* @return root external project's path if given path is considered to point to a known sub-project's config;
* {@code null} if it's not possible to find a root project's config path on the basis of the
* given path
*/
@Nullable
public static String getRootProjectPath(@NotNull String externalProjectPath,
@NotNull ProjectSystemId externalSystemId,
@NotNull Project project) {
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(externalSystemId);
if (manager == null) {
return null;
}
if (manager instanceof ExternalSystemAutoImportAware) {
return ((ExternalSystemAutoImportAware)manager).getAffectedExternalProjectPath(externalProjectPath, project);
}
return null;
}
/**
* {@link RemoteUtil#unwrap(Throwable) unwraps} given exception if possible and builds error message for it.
*
* @param e exception to process
* @return error message for the given exception
*/
@NotNull
public static @Nls String buildErrorMessage(@NotNull Throwable e) {
Throwable unwrapped = RemoteUtil.unwrap(e);
String reason = unwrapped.getLocalizedMessage();
if (!StringUtil.isEmpty(reason)) {
return reason;
}
else if (unwrapped.getClass() == ExternalSystemException.class) {
String originalReason = ((ExternalSystemException)unwrapped).getOriginalReason();
return ExternalSystemBundle.message("external.system.api.error.message.prefix", originalReason);
}
else {
return stacktraceAsString(unwrapped);
}
}
@NotNull
public static @NlsSafe String stacktraceAsString(@NotNull Throwable throwable) {
Throwable unwrapped = RemoteUtil.unwrap(throwable);
StringWriter writer = new StringWriter();
unwrapped.printStackTrace(new PrintWriter(writer));
return writer.toString();
}
@NotNull
public static AbstractExternalSystemSettings getSettings(@NotNull Project project, @NotNull ProjectSystemId externalSystemId)
throws IllegalArgumentException {
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(externalSystemId);
if (manager == null) {
throw new IllegalArgumentException(String.format(
"Can't retrieve external system settings for id '%s'. Reason: no such external system is registered",
externalSystemId.getReadableName()
));
}
return manager.getSettingsProvider().fun(project);
}
@SuppressWarnings("unchecked")
public static <S extends AbstractExternalSystemLocalSettings> S getLocalSettings(@NotNull Project project,
@NotNull ProjectSystemId externalSystemId)
throws IllegalArgumentException {
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(externalSystemId);
if (manager == null) {
throw new IllegalArgumentException(String.format(
"Can't retrieve local external system settings for id '%s'. Reason: no such external system is registered",
externalSystemId.getReadableName()
));
}
return (S)manager.getLocalSettingsProvider().fun(project);
}
@SuppressWarnings("unchecked")
public static <S extends ExternalSystemExecutionSettings> S getExecutionSettings(@NotNull Project project,
@NotNull String linkedProjectPath,
@NotNull ProjectSystemId externalSystemId)
throws IllegalArgumentException {
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(externalSystemId);
if (manager == null) {
throw new IllegalArgumentException(String.format(
"Can't retrieve external system execution settings for id '%s'. Reason: no such external system is registered",
externalSystemId.getReadableName()
));
}
return (S)manager.getExecutionSettingsProvider().fun(Pair.create(project, linkedProjectPath));
}
/**
   * Historically we prefer to work with a third-party api not from the ide process but from a dedicated slave process (there is a risk
   * that the third-party api has bugs which might corrupt the whole ide process, e.g. a memory leak in the api might crash
   * the whole ide process).
   * <p/>
   * However, we do allow the ide to be explicitly configured to work with a third-party external system api from the ide process.
   * <p/>
   * This method allows checking whether the ide is configured to use 'out of process' or 'in process' mode for the given system.
*
* @param externalSystemId target external system
* @return {@code true} if the ide is configured to work with external system api from the ide process;
* {@code false} otherwise
*/
public static boolean isInProcessMode(ProjectSystemId externalSystemId) {
return Registry.is(externalSystemId.getId() + ExternalSystemConstants.USE_IN_PROCESS_COMMUNICATION_REGISTRY_KEY_SUFFIX, false);
}
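  // Minimal sketch (hypothetical system id, not from the original docs): for a system registered with id
  // "GRADLE" this reads a registry key built from the id plus
  // ExternalSystemConstants.USE_IN_PROCESS_COMMUNICATION_REGISTRY_KEY_SUFFIX and defaults to false
  // (i.e. out-of-process mode):
  //
  //   boolean inProcess = ExternalSystemApiUtil.isInProcessMode(new ProjectSystemId("GRADLE"));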
public static ProjectModelExternalSource toExternalSource(@NotNull ProjectSystemId systemId) {
return ExternalProjectSystemRegistry.getInstance().getSourceById(systemId.getId());
}
@Contract(value = "_, null -> false", pure = true)
public static boolean isExternalSystemAwareModule(@NotNull ProjectSystemId systemId, @Nullable Module module) {
return module != null &&
!module.isDisposed() &&
systemId.getId().equals(ExternalSystemModulePropertyManager.getInstance(module).getExternalSystemId());
}
@Contract(value = "_, null -> false", pure = true)
public static boolean isExternalSystemAwareModule(@NotNull String systemId, @Nullable Module module) {
return module != null &&
!module.isDisposed() &&
systemId.equals(ExternalSystemModulePropertyManager.getInstance(module).getExternalSystemId());
}
@Nullable
@Contract(pure = true)
public static String getExternalProjectPath(@Nullable Module module) {
return module != null && !module.isDisposed() ? ExternalSystemModulePropertyManager.getInstance(module).getLinkedProjectPath() : null;
}
@Nullable
@Contract(pure = true)
public static String getExternalRootProjectPath(@Nullable Module module) {
return module != null && !module.isDisposed() ? ExternalSystemModulePropertyManager.getInstance(module).getRootProjectPath() : null;
}
@Nullable
@Contract(pure = true)
public static String getExternalProjectId(@Nullable Module module) {
return module != null && !module.isDisposed() ? ExternalSystemModulePropertyManager.getInstance(module).getLinkedProjectId() : null;
}
@Nullable
@Contract(pure = true)
public static String getExternalProjectGroup(@Nullable Module module) {
return module != null && !module.isDisposed() ? ExternalSystemModulePropertyManager.getInstance(module).getExternalModuleGroup() : null;
}
@Nullable
@Contract(pure = true)
public static String getExternalProjectVersion(@Nullable Module module) {
return module != null && !module.isDisposed()
? ExternalSystemModulePropertyManager.getInstance(module).getExternalModuleVersion()
: null;
}
@Nullable
@Contract(pure = true)
public static String getExternalModuleType(@Nullable Module module) {
return module != null && !module.isDisposed() ? ExternalSystemModulePropertyManager.getInstance(module).getExternalModuleType() : null;
}
public static void subscribe(@NotNull Project project,
@NotNull ProjectSystemId systemId,
@NotNull ExternalSystemSettingsListener listener) {
//noinspection unchecked
getSettings(project, systemId).subscribe(listener, project);
}
public static void subscribe(@NotNull Project project,
@NotNull ProjectSystemId systemId,
@NotNull ExternalSystemSettingsListener listener,
@NotNull Disposable parentDisposable) {
//noinspection unchecked
getSettings(project, systemId).subscribe(listener, parentDisposable);
}
@NotNull
public static Collection<TaskData> findProjectTasks(@NotNull Project project,
@NotNull ProjectSystemId systemId,
@NotNull String projectPath) {
AbstractExternalSystemSettings settings = getSettings(project, systemId);
ExternalProjectSettings linkedProjectSettings = settings.getLinkedProjectSettings(projectPath);
if (linkedProjectSettings == null) return Collections.emptyList();
ExternalProjectInfo projectInfo = ContainerUtil.find(
ProjectDataManager.getInstance().getExternalProjectsData(project, systemId),
info -> FileUtil.pathsEqual(linkedProjectSettings.getExternalProjectPath(), info.getExternalProjectPath())
);
if (projectInfo == null) return Collections.emptyList();
DataNode<ProjectData> projectStructure = projectInfo.getExternalProjectStructure();
if (projectStructure == null) return Collections.emptyList();
List<TaskData> tasks = new SmartList<>();
DataNode<ModuleData> moduleDataNode = ContainerUtil.find(
findAll(projectStructure, ProjectKeys.MODULE),
moduleNode -> FileUtil.pathsEqual(projectPath, moduleNode.getData().getLinkedExternalProjectPath())
);
if (moduleDataNode == null) return Collections.emptyList();
findAll(moduleDataNode, ProjectKeys.TASK).stream().map(DataNode::getData).forEach(tasks::add);
return tasks;
}
@ApiStatus.Experimental
@Nullable
public static DataNode<ProjectData> findProjectData(@NotNull Project project,
@NotNull ProjectSystemId systemId,
@NotNull String projectPath) {
ExternalProjectInfo projectInfo = findProjectInfo(project, systemId, projectPath);
if (projectInfo == null) return null;
return projectInfo.getExternalProjectStructure();
}
@ApiStatus.Experimental
@Nullable
public static ExternalProjectInfo findProjectInfo(@NotNull Project project,
@NotNull ProjectSystemId systemId,
@NotNull String projectPath) {
AbstractExternalSystemSettings settings = getSettings(project, systemId);
ExternalProjectSettings linkedProjectSettings = settings.getLinkedProjectSettings(projectPath);
if (linkedProjectSettings == null) return null;
String rootProjectPath = linkedProjectSettings.getExternalProjectPath();
return ProjectDataManager.getInstance().getExternalProjectData(project, systemId, rootProjectPath);
}
public static @NotNull FileChooserDescriptor getExternalProjectConfigDescriptor(@NotNull ProjectSystemId systemId) {
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(systemId);
if (manager instanceof ExternalSystemUiAware) {
ExternalSystemUiAware uiAware = ((ExternalSystemUiAware)manager);
FileChooserDescriptor descriptor = uiAware.getExternalProjectConfigDescriptor();
if (descriptor != null) {
return descriptor;
}
}
return FileChooserDescriptorFactory.createSingleLocalFileDescriptor();
}
}
|
siosio/intellij-community
|
platform/external-system-api/src/com/intellij/openapi/externalSystem/util/ExternalSystemApiUtil.java
|
Java
|
apache-2.0
| 30,895 | 40.138482 | 149 | 0.691245 | false |
<?php
new WPCOM_JSON_API_Update_Post_Endpoint( array(
'description' => 'Create a post.',
'group' => 'posts',
'stat' => 'posts:new',
'new_version' => '1.2',
'max_version' => '1',
'method' => 'POST',
'path' => '/sites/%s/posts/new',
'path_labels' => array(
'$site' => '(int|string) Site ID or domain',
),
'request_format' => array(
// explicitly document all input
'date' => "(ISO 8601 datetime) The post's creation time.",
'title' => '(HTML) The post title.',
'content' => '(HTML) The post content.',
'excerpt' => '(HTML) An optional post excerpt.',
'slug' => '(string) The name (slug) for the post, used in URLs.',
'author' => '(string) The username or ID for the user to assign the post to.',
		'publicize'  => '(array|bool) Whether the post should be publicized to external services. Pass an array of services to publicize to only a select few. Defaults to true.',
'publicize_message' => '(string) Custom message to be publicized to external services.',
'status' => array(
'publish' => 'Publish the post.',
'private' => 'Privately publish the post.',
'draft' => 'Save the post as a draft.',
'pending' => 'Mark the post as pending editorial approval.',
'auto-draft' => 'Save a placeholder for a newly created post, with no content.',
),
'sticky' => array(
'false' => 'Post is not marked as sticky.',
'true' => 'Stick the post to the front page.',
),
'password' => '(string) The plaintext password protecting the post, or, more likely, the empty string if the post is not password protected.',
'parent' => "(int) The post ID of the new post's parent.",
'type' => "(string) The post type. Defaults to 'post'. Post types besides post and page need to be whitelisted using the <code>rest_api_allowed_post_types</code> filter.",
'categories' => "(array|string) Comma-separated list or array of categories (name or id)",
'tags' => "(array|string) Comma-separated list or array of tags (name or id)",
'format' => array_merge( array( 'default' => 'Use default post format' ), get_post_format_strings() ),
'featured_image' => "(string) The post ID of an existing attachment to set as the featured image. Pass an empty string to delete the existing image.",
'media' => "(media) An array of files to attach to the post. To upload media, the entire request should be multipart/form-data encoded. Multiple media items will be displayed in a gallery. Accepts jpg, jpeg, png, gif, pdf, doc, ppt, odt, pptx, docx, pps, ppsx, xls, xlsx, key. Audio and Video may also be available. See <code>allowed_file_types</code> in the options response of the site endpoint. <br /><br /><strong>Example</strong>:<br />" .
"<code>curl \<br />--form 'title=Image' \<br />--form 'media[]=@/path/to/file.jpg' \<br />-H 'Authorization: BEARER your-token' \<br />'https://public-api.wordpress.com/rest/v1/sites/123/posts/new'</code>",
'media_urls' => "(array) An array of URLs for images to attach to a post. Sideloads the media in for a post.",
		'metadata'   => "(array) Array of metadata objects containing the following properties: `key` (metadata key), `id` (meta ID), `previous_value` (if set, the action will only occur for the provided previous value), `value` (the new value to set the meta to), `operation` (the operation to perform: `update` or `add`; defaults to `update`). All unprotected meta keys are available by default for read requests. Both unprotected and protected meta keys are available for authenticated requests with proper capabilities. Protected meta keys can be made available with the <code>rest_api_allowed_public_metadata</code> filter. (A hypothetical example request body is sketched in the comment after this endpoint definition.)",
'comments_open' => "(bool) Should the post be open to comments? Defaults to the blog's preference.",
		'pings_open' => "(bool) Should the post be open to pingbacks and trackbacks? Defaults to the blog's preference.",
'likes_enabled' => "(bool) Should the post be open to likes? Defaults to the blog's preference.",
'sharing_enabled' => "(bool) Should sharing buttons show on this post? Defaults to true.",
'menu_order' => "(int) (Pages Only) the order pages should appear in. Use 0 to maintain alphabetical order.",
),
'example_request' => 'https://public-api.wordpress.com/rest/v1/sites/82974409/posts/new/',
'example_request_data' => array(
'headers' => array(
'authorization' => 'Bearer YOUR_API_TOKEN'
),
'body' => array(
'title' => 'Hello World',
'content' => 'Hello. I am a test post. I was created by the API',
'tags' => 'tests',
'categories' => 'API'
)
)
) );
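// A minimal sketch of a request body using the metadata field documented above. All key names and
// values here are hypothetical examples, not required fields:
//
//   $body = array(
//       'title'    => 'Hello World',
//       'metadata' => array(
//           array( 'key' => 'my_meta_key', 'value' => 'my value', 'operation' => 'add' ),
//           array( 'id' => 123, 'operation' => 'delete' ),
//       ),
//   );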
new WPCOM_JSON_API_Update_Post_Endpoint( array(
'description' => 'Edit a post.',
'group' => 'posts',
'stat' => 'posts:1:POST',
'new_version' => '1.2',
'max_version' => '1',
'method' => 'POST',
'path' => '/sites/%s/posts/%d',
'path_labels' => array(
'$site' => '(int|string) Site ID or domain',
'$post_ID' => '(int) The post ID',
),
'request_format' => array(
'date' => "(ISO 8601 datetime) The post's creation time.",
'title' => '(HTML) The post title.',
'content' => '(HTML) The post content.',
'excerpt' => '(HTML) An optional post excerpt.',
'slug' => '(string) The name (slug) for the post, used in URLs.',
'author' => '(string) The username or ID for the user to assign the post to.',
		'publicize' => '(array|bool) Whether the post should be publicized to external services. Pass an array of services to publicize to only a select few. Defaults to true.',
'publicize_message' => '(string) Custom message to be publicized to external services.',
'status' => array(
'publish' => 'Publish the post.',
'private' => 'Privately publish the post.',
'draft' => 'Save the post as a draft.',
'pending' => 'Mark the post as pending editorial approval.',
'trash' => 'Set the post as trashed.',
),
'sticky' => array(
'false' => 'Post is not marked as sticky.',
'true' => 'Stick the post to the front page.',
),
'password' => '(string) The plaintext password protecting the post, or, more likely, the empty string if the post is not password protected.',
'parent' => "(int) The post ID of the new post's parent.",
'categories' => "(array|string) Comma-separated list or array of categories (name or id)",
'tags' => "(array|string) Comma-separated list or array of tags (name or id)",
'format' => array_merge( array( 'default' => 'Use default post format' ), get_post_format_strings() ),
'comments_open' => '(bool) Should the post be open to comments?',
		'pings_open'       => '(bool) Should the post be open to pingbacks and trackbacks?',
'likes_enabled' => "(bool) Should the post be open to likes?",
'menu_order' => "(int) (Pages Only) the order pages should appear in. Use 0 to maintain alphabetical order.",
'sharing_enabled' => "(bool) Should sharing buttons show on this post?",
'featured_image' => "(string) The post ID of an existing attachment to set as the featured image. Pass an empty string to delete the existing image.",
		'media'      => "(media) An array of files to attach to the post. To upload media, the entire request should be multipart/form-data encoded. Multiple media items will be displayed in a gallery. Accepts jpg, jpeg, png, gif, pdf, doc, ppt, odt, pptx, docx, pps, ppsx, xls, xlsx, key. Audio and Video may also be available. See <code>allowed_file_types</code> in the options response of the site endpoint. <br /><br /><strong>Example</strong>:<br />" .
"<code>curl \<br />--form 'title=Image' \<br />--form 'media[]=@/path/to/file.jpg' \<br />-H 'Authorization: BEARER your-token' \<br />'https://public-api.wordpress.com/rest/v1/sites/123/posts/new'</code>",
'media_urls' => "(array) An array of URLs for images to attach to a post. Sideloads the media in for a post.",
'metadata' => "(array) Array of metadata objects containing the following properties: `key` (metadata key), `id` (meta ID), `previous_value` (if set, the action will only occur for the provided previous value), `value` (the new value to set the meta to), `operation` (the operation to perform: `update` or `add`; defaults to `update`). All unprotected meta keys are available by default for read requests. Both unprotected and protected meta keys are available for authenticated requests with proper capabilities. Protected meta keys can be made available with the <code>rest_api_allowed_public_metadata</code> filter.",
),
'example_request' => 'https://public-api.wordpress.com/rest/v1/sites/82974409/posts/881',
'example_request_data' => array(
'headers' => array(
'authorization' => 'Bearer YOUR_API_TOKEN'
),
'body' => array(
'title' => 'Hello World (Again)',
'content' => 'Hello. I am an edited post. I was edited by the API',
'tags' => 'tests',
'categories' => 'API'
)
)
) );
new WPCOM_JSON_API_Update_Post_Endpoint( array(
'description' => 'Delete a post. Note: If the trash is enabled, this request will send the post to the trash. A second request will permanently delete the post.',
'group' => 'posts',
'stat' => 'posts:1:delete',
'new_version' => '1.1',
'max_version' => '1',
'method' => 'POST',
'path' => '/sites/%s/posts/%d/delete',
'path_labels' => array(
'$site' => '(int|string) Site ID or domain',
'$post_ID' => '(int) The post ID',
),
'example_request' => 'https://public-api.wordpress.com/rest/v1/sites/82974409/posts/$post_ID/delete/',
'example_request_data' => array(
'headers' => array(
'authorization' => 'Bearer YOUR_API_TOKEN'
)
)
) );
new WPCOM_JSON_API_Update_Post_Endpoint( array(
'description' => 'Restore a post or page from the trash to its previous status.',
'group' => 'posts',
'stat' => 'posts:1:restore',
'method' => 'POST',
'new_version' => '1.1',
'max_version' => '1',
'path' => '/sites/%s/posts/%d/restore',
'path_labels' => array(
'$site' => '(int|string) Site ID or domain',
'$post_ID' => '(int) The post ID',
),
'example_request' => 'https://public-api.wordpress.com/rest/v1/sites/82974409/posts/$post_ID/restore/',
'example_request_data' => array(
'headers' => array(
'authorization' => 'Bearer YOUR_API_TOKEN'
)
)
) );
class WPCOM_JSON_API_Update_Post_Endpoint extends WPCOM_JSON_API_Post_Endpoint {
function __construct( $args ) {
parent::__construct( $args );
if ( $this->api->ends_with( $this->path, '/delete' ) ) {
$this->post_object_format['status']['deleted'] = 'The post has been deleted permanently.';
}
}
// /sites/%s/posts/new -> $blog_id
// /sites/%s/posts/%d -> $blog_id, $post_id
// /sites/%s/posts/%d/delete -> $blog_id, $post_id
// /sites/%s/posts/%d/restore -> $blog_id, $post_id
function callback( $path = '', $blog_id = 0, $post_id = 0 ) {
$blog_id = $this->api->switch_to_blog_and_validate_user( $this->api->get_blog_id( $blog_id ) );
if ( is_wp_error( $blog_id ) ) {
return $blog_id;
}
if ( $this->api->ends_with( $path, '/delete' ) ) {
return $this->delete_post( $path, $blog_id, $post_id );
} elseif ( $this->api->ends_with( $path, '/restore' ) ) {
return $this->restore_post( $path, $blog_id, $post_id );
} else {
return $this->write_post( $path, $blog_id, $post_id );
}
}
// /sites/%s/posts/new -> $blog_id
// /sites/%s/posts/%d -> $blog_id, $post_id
function write_post( $path, $blog_id, $post_id ) {
$new = $this->api->ends_with( $path, '/new' );
$args = $this->query_args();
// unhook publicize, it's hooked again later -- without this, skipping services is impossible
if ( defined( 'IS_WPCOM' ) && IS_WPCOM ) {
remove_action( 'save_post', array( $GLOBALS['publicize_ui']->publicize, 'async_publicize_post' ), 100, 2 );
add_action( 'rest_api_inserted_post', array( $GLOBALS['publicize_ui']->publicize, 'async_publicize_post' ) );
}
if ( $new ) {
$input = $this->input( true );
if ( 'revision' === $input['type'] ) {
if ( ! isset( $input['parent'] ) ) {
return new WP_Error( 'invalid_input', 'Invalid request input', 400 );
}
$input['status'] = 'inherit'; // force inherit for revision type
$input['slug'] = $input['parent'] . '-autosave-v1';
}
elseif ( !isset( $input['title'] ) && !isset( $input['content'] ) && !isset( $input['excerpt'] ) ) {
return new WP_Error( 'invalid_input', 'Invalid request input', 400 );
}
// default to post
if ( empty( $input['type'] ) )
$input['type'] = 'post';
$post_type = get_post_type_object( $input['type'] );
if ( ! $this->is_post_type_allowed( $input['type'] ) ) {
return new WP_Error( 'unknown_post_type', 'Unknown post type', 404 );
}
if ( ! empty( $input['author'] ) ) {
$author_id = $this->parse_and_set_author( $input['author'], $input['type'] );
unset( $input['author'] );
if ( is_wp_error( $author_id ) )
return $author_id;
}
if ( 'publish' === $input['status'] ) {
if ( ! current_user_can( $post_type->cap->publish_posts ) ) {
if ( current_user_can( $post_type->cap->edit_posts ) ) {
$input['status'] = 'pending';
} else {
return new WP_Error( 'unauthorized', 'User cannot publish posts', 403 );
}
}
} else {
if ( !current_user_can( $post_type->cap->edit_posts ) ) {
return new WP_Error( 'unauthorized', 'User cannot edit posts', 403 );
}
}
} else {
$input = $this->input( false );
if ( !is_array( $input ) || !$input ) {
return new WP_Error( 'invalid_input', 'Invalid request input', 400 );
}
if ( isset( $input['status'] ) && 'trash' === $input['status'] && ! current_user_can( 'delete_post', $post_id ) ) {
return new WP_Error( 'unauthorized', 'User cannot delete post', 403 );
}
$post = get_post( $post_id );
$_post_type = ( ! empty( $input['type'] ) ) ? $input['type'] : $post->post_type;
$post_type = get_post_type_object( $_post_type );
if ( !$post || is_wp_error( $post ) ) {
return new WP_Error( 'unknown_post', 'Unknown post', 404 );
}
if ( !current_user_can( 'edit_post', $post->ID ) ) {
return new WP_Error( 'unauthorized', 'User cannot edit post', 403 );
}
if ( ! empty( $input['author'] ) ) {
$author_id = $this->parse_and_set_author( $input['author'], $_post_type );
unset( $input['author'] );
if ( is_wp_error( $author_id ) )
return $author_id;
}
if ( ( isset( $input['status'] ) && 'publish' === $input['status'] ) && 'publish' !== $post->post_status && !current_user_can( 'publish_post', $post->ID ) ) {
$input['status'] = 'pending';
}
$last_status = $post->post_status;
$new_status = isset( $input['status'] ) ? $input['status'] : $last_status;
// Make sure that drafts get the current date when transitioning to publish if not supplied in the post.
$date_in_past = ( strtotime($post->post_date_gmt) < time() );
if ( 'publish' === $new_status && 'draft' === $last_status && ! isset( $input['date_gmt'] ) && $date_in_past ) {
$input['date_gmt'] = gmdate( 'Y-m-d H:i:s' );
}
}
if ( function_exists( 'wpcom_switch_to_locale' ) ) {
// fixes calypso-pre-oss #12476: respect blog locale when creating the post slug
wpcom_switch_to_locale( get_blog_lang_code( $blog_id ) );
}
// If date was set, $this->input will set date_gmt, date still needs to be adjusted for the blog's offset
if ( isset( $input['date_gmt'] ) ) {
$gmt_offset = get_option( 'gmt_offset' );
$time_with_offset = strtotime( $input['date_gmt'] ) + $gmt_offset * HOUR_IN_SECONDS;
$input['date'] = date( 'Y-m-d H:i:s', $time_with_offset );
}
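		// Worked example (hypothetical values, assuming the UTC runtime timezone WordPress uses): with
		// gmt_offset = 2 and date_gmt = '2017-01-01 10:00:00', the local 'date' computed above becomes
		// '2017-01-01 12:00:00' (10:00 UTC plus 2 * HOUR_IN_SECONDS).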
if ( ! empty( $author_id ) && get_current_user_id() != $author_id ) {
if ( ! current_user_can( $post_type->cap->edit_others_posts ) ) {
return new WP_Error( 'unauthorized', "User is not allowed to publish others' posts.", 403 );
} elseif ( ! user_can( $author_id, $post_type->cap->edit_posts ) ) {
return new WP_Error( 'unauthorized', 'Assigned author cannot publish post.', 403 );
}
}
if ( !is_post_type_hierarchical( $post_type->name ) && 'revision' !== $post_type->name ) {
unset( $input['parent'] );
}
$tax_input = array();
foreach ( array( 'categories' => 'category', 'tags' => 'post_tag' ) as $key => $taxonomy ) {
if ( ! isset( $input[ $key ] ) ) {
continue;
}
$tax_input[ $taxonomy ] = array();
$is_hierarchical = is_taxonomy_hierarchical( $taxonomy );
if ( is_array( $input[$key] ) ) {
$terms = $input[$key];
} else {
$terms = explode( ',', $input[$key] );
}
foreach ( $terms as $term ) {
/**
* `curl --data 'category[]=123'` should be interpreted as a category ID,
* not a category whose name is '123'.
*
* Consequence: To add a category/tag whose name is '123', the client must
* first look up its ID.
*/
$term = (string) $term; // ctype_digit compat
if ( ctype_digit( $term ) ) {
$term = (int) $term;
}
$term_info = term_exists( $term, $taxonomy );
if ( ! $term_info ) {
// A term ID that doesn't already exist. Ignore it: we don't know what name to give it.
if ( is_int( $term ) ){
continue;
}
// only add a new tag/cat if the user has access to
$tax = get_taxonomy( $taxonomy );
// see https://core.trac.wordpress.org/ticket/26409
if ( 'category' === $taxonomy && ! current_user_can( $tax->cap->edit_terms ) ) {
continue;
} else if ( ! current_user_can( $tax->cap->assign_terms ) ) {
continue;
}
$term_info = wp_insert_term( $term, $taxonomy );
}
if ( ! is_wp_error( $term_info ) ) {
if ( $is_hierarchical ) {
// Categories must be added by ID
$tax_input[$taxonomy][] = (int) $term_info['term_id'];
} else {
// Tags must be added by name
if ( is_int( $term ) ) {
$term = get_term( $term, $taxonomy );
$tax_input[$taxonomy][] = $term->name;
} else {
$tax_input[$taxonomy][] = $term;
}
}
}
}
}
if ( isset( $input['categories'] ) && empty( $tax_input['category'] ) && 'revision' !== $post_type->name ) {
$tax_input['category'][] = get_option( 'default_category' );
}
unset( $input['tags'], $input['categories'] );
$insert = array();
if ( !empty( $input['slug'] ) ) {
$insert['post_name'] = $input['slug'];
unset( $input['slug'] );
}
if ( isset( $input['comments_open'] ) ) {
$insert['comment_status'] = ( true === $input['comments_open'] ) ? 'open' : 'closed';
}
if ( isset( $input['pings_open'] ) ) {
$insert['ping_status'] = ( true === $input['pings_open'] ) ? 'open' : 'closed';
}
unset( $input['comments_open'], $input['pings_open'] );
if ( isset( $input['menu_order'] ) ) {
$insert['menu_order'] = $input['menu_order'];
unset( $input['menu_order'] );
}
$publicize = isset( $input['publicize'] ) ? $input['publicize'] : null;
unset( $input['publicize'] );
$publicize_custom_message = isset( $input['publicize_message'] ) ? $input['publicize_message'] : null;
unset( $input['publicize_message'] );
if ( isset( $input['featured_image'] ) ) {
$featured_image = trim( $input['featured_image'] );
$delete_featured_image = empty( $featured_image );
unset( $input['featured_image'] );
}
$metadata = isset( $input['metadata'] ) ? $input['metadata'] : null;
unset( $input['metadata'] );
$likes = isset( $input['likes_enabled'] ) ? $input['likes_enabled'] : null;
unset( $input['likes_enabled'] );
$sharing = isset( $input['sharing_enabled'] ) ? $input['sharing_enabled'] : null;
unset( $input['sharing_enabled'] );
$sticky = isset( $input['sticky'] ) ? $input['sticky'] : null;
unset( $input['sticky'] );
foreach ( $input as $key => $value ) {
$insert["post_$key"] = $value;
}
if ( ! empty( $author_id ) ) {
$insert['post_author'] = absint( $author_id );
}
if ( ! empty( $tax_input ) ) {
$insert['tax_input'] = $tax_input;
}
$has_media = isset( $input['media'] ) && $input['media'] ? count( $input['media'] ) : false;
$has_media_by_url = isset( $input['media_urls'] ) && $input['media_urls'] ? count( $input['media_urls'] ) : false;
if ( $new ) {
if ( isset( $input['content'] ) && ! has_shortcode( $input['content'], 'gallery' ) && ( $has_media || $has_media_by_url ) ) {
switch ( ( $has_media + $has_media_by_url ) ) {
case 0 :
// No images - do nothing.
break;
case 1 :
// 1 image - make it big
$insert['post_content'] = $input['content'] = "[gallery size=full columns=1]\n\n" . $input['content'];
break;
default :
// Several images - 3 column gallery
$insert['post_content'] = $input['content'] = "[gallery]\n\n" . $input['content'];
break;
}
}
$post_id = wp_insert_post( add_magic_quotes( $insert ), true );
} else {
$insert['ID'] = $post->ID;
// wp_update_post ignores date unless edit_date is set
// See: http://codex.wordpress.org/Function_Reference/wp_update_post#Scheduling_posts
// See: https://core.trac.wordpress.org/browser/tags/3.9.2/src/wp-includes/post.php#L3302
if ( isset( $input['date_gmt'] ) || isset( $input['date'] ) ) {
$insert['edit_date'] = true;
}
// this two-step process ensures any changes submitted along with status=trash get saved before trashing
if ( isset( $input['status'] ) && 'trash' === $input['status'] ) {
// if we insert it with status='trash', it will get double-trashed, so insert it as a draft first
unset( $insert['status'] );
$post_id = wp_update_post( (object) $insert );
// now call wp_trash_post so post_meta gets set and any filters get called
wp_trash_post( $post_id );
} else {
$post_id = wp_update_post( (object) $insert );
}
}
if ( !$post_id || is_wp_error( $post_id ) ) {
return $post_id;
}
// make sure this post actually exists and is not an error of some kind (ie, trying to load media in the posts endpoint)
$post_check = $this->get_post_by( 'ID', $post_id, $args['context'] );
if ( is_wp_error( $post_check ) ) {
return $post_check;
}
if ( $has_media ) {
$this->api->trap_wp_die( 'upload_error' );
foreach ( $input['media'] as $media_item ) {
$_FILES['.api.media.item.'] = $media_item;
// check for WP_Error if we ever actually need $media_id
$media_id = media_handle_upload( '.api.media.item.', $post_id );
}
$this->api->trap_wp_die( null );
unset( $_FILES['.api.media.item.'] );
}
if ( $has_media_by_url ) {
foreach ( $input['media_urls'] as $url ) {
$this->handle_media_sideload( $url, $post_id );
}
}
// Set like status for the post
/** This filter is documented in modules/likes.php */
$sitewide_likes_enabled = (bool) apply_filters( 'wpl_is_enabled_sitewide', ! get_option( 'disabled_likes' ) );
if ( $new ) {
if ( $sitewide_likes_enabled ) {
if ( false === $likes ) {
update_post_meta( $post_id, 'switch_like_status', 0 );
} else {
delete_post_meta( $post_id, 'switch_like_status' );
}
} else {
if ( $likes ) {
update_post_meta( $post_id, 'switch_like_status', 1 );
} else {
delete_post_meta( $post_id, 'switch_like_status' );
}
}
} else {
if ( isset( $likes ) ) {
if ( $sitewide_likes_enabled ) {
if ( false === $likes ) {
update_post_meta( $post_id, 'switch_like_status', 0 );
} else {
delete_post_meta( $post_id, 'switch_like_status' );
}
} else {
if ( true === $likes ) {
update_post_meta( $post_id, 'switch_like_status', 1 );
} else {
delete_post_meta( $post_id, 'switch_like_status' );
}
}
}
}
// Set sharing status of the post
if ( $new ) {
$sharing_enabled = isset( $sharing ) ? (bool) $sharing : true;
if ( false === $sharing_enabled ) {
update_post_meta( $post_id, 'sharing_disabled', 1 );
}
}
else {
if ( isset( $sharing ) && true === $sharing ) {
delete_post_meta( $post_id, 'sharing_disabled' );
} else if ( isset( $sharing ) && false == $sharing ) {
update_post_meta( $post_id, 'sharing_disabled', 1 );
}
}
if ( isset( $sticky ) ) {
if ( true === $sticky ) {
stick_post( $post_id );
} else {
unstick_post( $post_id );
}
}
		// WPCOM Specific (Jetpack's will get bumped elsewhere).
// Tracks how many posts are published and sets meta
// so we can track some other cool stats (like likes & comments on posts published)
if ( defined( 'IS_WPCOM' ) && IS_WPCOM ) {
if (
( $new && 'publish' == $input['status'] )
|| (
! $new && isset( $last_status )
&& 'publish' != $last_status
&& isset( $new_status )
&& 'publish' == $new_status
)
) {
/** This action is documented in modules/widgets/social-media-icons.php */
do_action( 'jetpack_bump_stats_extras', 'api-insights-posts', $this->api->token_details['client_id'] );
update_post_meta( $post_id, '_rest_api_published', 1 );
update_post_meta( $post_id, '_rest_api_client_id', $this->api->token_details['client_id'] );
}
}
		// We ask the user/dev to pass the Publicize services he/she wants activated for the post, but Publicize expects us
		// to instead flag the connections that should be skipped. Proceed with said logic.
		// Any posts coming from Path (client ID 25952) should also skip publicizing.
if ( $publicize === false || ( isset( $this->api->token_details['client_id'] ) && 25952 == $this->api->token_details['client_id'] ) ) {
// No publicize at all, skip all by ID
foreach ( $GLOBALS['publicize_ui']->publicize->get_services( 'all' ) as $name => $service ) {
delete_post_meta( $post_id, $GLOBALS['publicize_ui']->publicize->POST_SKIP . $name );
$service_connections = $GLOBALS['publicize_ui']->publicize->get_connections( $name );
if ( ! $service_connections ) {
continue;
}
foreach ( $service_connections as $service_connection ) {
update_post_meta( $post_id, $GLOBALS['publicize_ui']->publicize->POST_SKIP . $service_connection->unique_id, 1 );
}
}
} else if ( is_array( $publicize ) && ( count ( $publicize ) > 0 ) ) {
foreach ( $GLOBALS['publicize_ui']->publicize->get_services( 'all' ) as $name => $service ) {
/*
* We support both indexed and associative arrays:
* * indexed are to pass entire services
* * associative are to pass specific connections per service
*
* We do support mixed arrays: mixed integer and string keys (see 3rd example below).
*
* EG: array( 'twitter', 'facebook') will only publicize to those, ignoring the other available services
* Form data: publicize[]=twitter&publicize[]=facebook
* EG: array( 'twitter' => '(int) $pub_conn_id_0, (int) $pub_conn_id_3', 'facebook' => (int) $pub_conn_id_7 ) will publicize to two Twitter accounts, and one Facebook connection, of potentially many.
* Form data: publicize[twitter]=$pub_conn_id_0,$pub_conn_id_3&publicize[facebook]=$pub_conn_id_7
* EG: array( 'twitter', 'facebook' => '(int) $pub_conn_id_0, (int) $pub_conn_id_3' ) will publicize to all available Twitter accounts, but only 2 of potentially many Facebook connections
* Form data: publicize[]=twitter&publicize[facebook]=$pub_conn_id_0,$pub_conn_id_3
*/
// Delete any stale SKIP value for the service by name. We'll add it back by ID.
delete_post_meta( $post_id, $GLOBALS['publicize_ui']->publicize->POST_SKIP . $name );
// Get the user's connections
$service_connections = $GLOBALS['publicize_ui']->publicize->get_connections( $name );
// if the user doesn't have any connections for this service, move on
if ( ! $service_connections ) {
continue;
}
if ( !in_array( $name, $publicize ) && !array_key_exists( $name, $publicize ) ) {
// Skip the whole service by adding each connection ID
foreach ( $service_connections as $service_connection ) {
update_post_meta( $post_id, $GLOBALS['publicize_ui']->publicize->POST_SKIP . $service_connection->unique_id, 1 );
}
} else if ( !empty( $publicize[ $name ] ) ) {
// Seems we're being asked to only push to [a] specific connection[s].
// Explode the list on commas, which will also support a single passed ID
$requested_connections = explode( ',', ( preg_replace( '/[\s]*/', '', $publicize[ $name ] ) ) );
// Flag the connections we can't match with the requested list to be skipped.
foreach ( $service_connections as $service_connection ) {
if ( !in_array( $service_connection->meta['connection_data']->id, $requested_connections ) ) {
update_post_meta( $post_id, $GLOBALS['publicize_ui']->publicize->POST_SKIP . $service_connection->unique_id, 1 );
} else {
delete_post_meta( $post_id, $GLOBALS['publicize_ui']->publicize->POST_SKIP . $service_connection->unique_id );
}
}
} else {
// delete all SKIP values; it's okay to publish to all connected IDs for this service
foreach ( $service_connections as $service_connection ) {
delete_post_meta( $post_id, $GLOBALS['publicize_ui']->publicize->POST_SKIP . $service_connection->unique_id );
}
}
}
}
if ( ! is_null( $publicize_custom_message ) ) {
if ( empty( $publicize_custom_message ) ) {
delete_post_meta( $post_id, $GLOBALS['publicize_ui']->publicize->POST_MESS );
} else {
update_post_meta( $post_id, $GLOBALS['publicize_ui']->publicize->POST_MESS, trim( $publicize_custom_message ) );
}
}
if ( ! empty( $insert['post_format'] ) ) {
if ( 'default' !== strtolower( $insert['post_format'] ) ) {
set_post_format( $post_id, $insert['post_format'] );
}
else {
set_post_format( $post_id, get_option( 'default_post_format' ) );
}
}
if ( isset( $featured_image ) ) {
$this->parse_and_set_featured_image( $post_id, $delete_featured_image, $featured_image );
}
if ( ! empty( $metadata ) ) {
foreach ( (array) $metadata as $meta ) {
$meta = (object) $meta;
// Custom meta description can only be set on sites that have a business subscription.
if ( Jetpack_SEO_Posts::DESCRIPTION_META_KEY == $meta->key && ! Jetpack_SEO_Utils::is_enabled_jetpack_seo() ) {
return new WP_Error( 'unauthorized', __( 'SEO tools are not enabled for this site.', 'jetpack' ), 403 );
}
$existing_meta_item = new stdClass;
if ( empty( $meta->operation ) )
$meta->operation = 'update';
if ( ! empty( $meta->value ) ) {
if ( 'true' == $meta->value )
$meta->value = true;
if ( 'false' == $meta->value )
$meta->value = false;
}
if ( ! empty( $meta->id ) ) {
$meta->id = absint( $meta->id );
$existing_meta_item = get_metadata_by_mid( 'post', $meta->id );
if ( $post_id !== (int) $existing_meta_item->post_id ) {
// Only allow updates for metadata on this post
continue;
}
}
$unslashed_meta_key = wp_unslash( $meta->key ); // should match what the final key will be
$meta->key = wp_slash( $meta->key );
$unslashed_existing_meta_key = wp_unslash( $existing_meta_item->meta_key );
$existing_meta_item->meta_key = wp_slash( $existing_meta_item->meta_key );
// make sure that the meta id passed matches the existing meta key
if ( ! empty( $meta->id ) && ! empty( $meta->key ) ) {
$meta_by_id = get_metadata_by_mid( 'post', $meta->id );
if ( $meta_by_id->meta_key !== $meta->key ) {
continue; // skip this meta
}
}
switch ( $meta->operation ) {
case 'delete':
if ( ! empty( $meta->id ) && ! empty( $existing_meta_item->meta_key ) && current_user_can( 'delete_post_meta', $post_id, $unslashed_existing_meta_key ) ) {
delete_metadata_by_mid( 'post', $meta->id );
} elseif ( ! empty( $meta->key ) && ! empty( $meta->previous_value ) && current_user_can( 'delete_post_meta', $post_id, $unslashed_meta_key ) ) {
delete_post_meta( $post_id, $meta->key, $meta->previous_value );
} elseif ( ! empty( $meta->key ) && current_user_can( 'delete_post_meta', $post_id, $unslashed_meta_key ) ) {
delete_post_meta( $post_id, $meta->key );
}
break;
case 'add':
if ( ! empty( $meta->id ) || ! empty( $meta->previous_value ) ) {
break;
					} elseif ( ! empty( $meta->key ) && ! empty( $meta->value ) && ( current_user_can( 'add_post_meta', $post_id, $unslashed_meta_key ) || WPCOM_JSON_API_Metadata::is_public( $meta->key ) ) ) {
add_post_meta( $post_id, $meta->key, $meta->value );
}
break;
case 'update':
if ( ! isset( $meta->value ) ) {
break;
} elseif ( ! empty( $meta->id ) && ! empty( $existing_meta_item->meta_key ) && ( current_user_can( 'edit_post_meta', $post_id, $unslashed_existing_meta_key ) || WPCOM_JSON_API_Metadata::is_public( $meta->key ) ) ) {
update_metadata_by_mid( 'post', $meta->id, $meta->value );
} elseif ( ! empty( $meta->key ) && ! empty( $meta->previous_value ) && ( current_user_can( 'edit_post_meta', $post_id, $unslashed_meta_key ) || WPCOM_JSON_API_Metadata::is_public( $meta->key ) ) ) {
						update_post_meta( $post_id, $meta->key, $meta->value, $meta->previous_value );
} elseif ( ! empty( $meta->key ) && ( current_user_can( 'edit_post_meta', $post_id, $unslashed_meta_key ) || WPCOM_JSON_API_Metadata::is_public( $meta->key ) ) ) {
update_post_meta( $post_id, $meta->key, $meta->value );
}
break;
}
}
}
/**
* Fires when a post is created via the REST API.
*
* @module json-api
*
* @since 2.3.0
*
* @param int $post_id Post ID.
* @param array $insert Data used to build the post.
* @param string $new New post URL suffix.
*/
do_action( 'rest_api_inserted_post', $post_id, $insert, $new );
$return = $this->get_post_by( 'ID', $post_id, $args['context'] );
if ( !$return || is_wp_error( $return ) ) {
return $return;
}
if ( isset( $input['type'] ) && 'revision' === $input['type'] ) {
$return['preview_nonce'] = wp_create_nonce( 'post_preview_' . $input['parent'] );
}
if ( isset( $sticky ) ) {
// workaround for sticky test occasionally failing, maybe a race condition with stick_post() above
$return['sticky'] = ( true === $sticky );
}
/** This action is documented in json-endpoints/class.wpcom-json-api-site-settings-endpoint.php */
do_action( 'wpcom_json_api_objects', 'posts' );
return $return;
}
// /sites/%s/posts/%d/delete -> $blog_id, $post_id
function delete_post( $path, $blog_id, $post_id ) {
$post = get_post( $post_id );
if ( !$post || is_wp_error( $post ) ) {
return new WP_Error( 'unknown_post', 'Unknown post', 404 );
}
if ( ! $this->is_post_type_allowed( $post->post_type ) ) {
return new WP_Error( 'unknown_post_type', 'Unknown post type', 404 );
}
if ( !current_user_can( 'delete_post', $post->ID ) ) {
return new WP_Error( 'unauthorized', 'User cannot delete posts', 403 );
}
$args = $this->query_args();
$return = $this->get_post_by( 'ID', $post->ID, $args['context'] );
if ( !$return || is_wp_error( $return ) ) {
return $return;
}
/** This action is documented in json-endpoints/class.wpcom-json-api-site-settings-endpoint.php */
do_action( 'wpcom_json_api_objects', 'posts' );
// we need to call wp_trash_post so that untrash will work correctly for all post types
if ( 'trash' === $post->post_status )
wp_delete_post( $post->ID );
else
wp_trash_post( $post->ID );
$status = get_post_status( $post->ID );
if ( false === $status ) {
$return['status'] = 'deleted';
return $return;
}
return $this->get_post_by( 'ID', $post->ID, $args['context'] );
}
// /sites/%s/posts/%d/restore -> $blog_id, $post_id
function restore_post( $path, $blog_id, $post_id ) {
$args = $this->query_args();
$post = get_post( $post_id );
if ( !$post || is_wp_error( $post ) ) {
return new WP_Error( 'unknown_post', 'Unknown post', 404 );
}
if ( !current_user_can( 'delete_post', $post->ID ) ) {
return new WP_Error( 'unauthorized', 'User cannot restore trashed posts', 403 );
}
/** This action is documented in json-endpoints/class.wpcom-json-api-site-settings-endpoint.php */
do_action( 'wpcom_json_api_objects', 'posts' );
wp_untrash_post( $post->ID );
return $this->get_post_by( 'ID', $post->ID, $args['context'] );
}
private function parse_and_set_featured_image( $post_id, $delete_featured_image, $featured_image ) {
if ( $delete_featured_image ) {
delete_post_thumbnail( $post_id );
return;
}
$featured_image = (string) $featured_image;
// if we got a post ID, we can just set it as the thumbnail
if ( ctype_digit( $featured_image ) && 'attachment' == get_post_type( $featured_image ) ) {
set_post_thumbnail( $post_id, $featured_image );
return $featured_image;
}
$featured_image_id = $this->handle_media_sideload( $featured_image, $post_id, 'image' );
if ( empty( $featured_image_id ) || ! is_int( $featured_image_id ) )
return false;
set_post_thumbnail( $post_id, $featured_image_id );
return $featured_image_id;
}
private function parse_and_set_author( $author = null, $post_type = 'post' ) {
if ( empty( $author ) || ! post_type_supports( $post_type, 'author' ) )
return get_current_user_id();
$author = (string) $author;
if ( ctype_digit( $author ) ) {
$_user = get_user_by( 'id', $author );
if ( ! $_user || is_wp_error( $_user ) )
return new WP_Error( 'invalid_author', 'Invalid author provided' );
return $_user->ID;
}
$_user = get_user_by( 'login', $author );
if ( ! $_user || is_wp_error( $_user ) )
return new WP_Error( 'invalid_author', 'Invalid author provided' );
return $_user->ID;
}
}
|
sifonsecac/capitalino-errante
|
wp-content/plugins/jetpack/json-endpoints/class.wpcom-json-api-update-post-endpoint.php
|
PHP
|
apache-2.0
| 37,790 | 40.031488 | 627 | 0.603572 | false |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.6.0_36) on Sat Jan 23 15:54:20 CST 2016 -->
<title>AppMasterToMaster.StallingTasks$</title>
<meta name="date" content="2016-01-23">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="AppMasterToMaster.StallingTasks$";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../io/gearpump/streaming/AppMasterToMaster.StallingTasks.html" title="class in io.gearpump.streaming"><span class="strong">PREV CLASS</span></a></li>
<li><a href="../../../io/gearpump/streaming/AppMasterToMaster$.html" title="class in io.gearpump.streaming"><span class="strong">NEXT CLASS</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?io/gearpump/streaming/AppMasterToMaster.StallingTasks$.html" target="_top">FRAMES</a></li>
<li><a href="AppMasterToMaster.StallingTasks$.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>SUMMARY: </li>
<li>NESTED | </li>
<li><a href="#field_summary">FIELD</a> | </li>
<li><a href="#constructor_summary">CONSTR</a> | </li>
<li><a href="#methods_inherited_from_class_scala.runtime.AbstractFunction1">METHOD</a></li>
</ul>
<ul class="subNavList">
<li>DETAIL: </li>
<li><a href="#field_detail">FIELD</a> | </li>
<li><a href="#constructor_detail">CONSTR</a> | </li>
<li>METHOD</li>
</ul>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<p class="subTitle">io.gearpump.streaming</p>
<h2 title="Class AppMasterToMaster.StallingTasks$" class="title">Class AppMasterToMaster.StallingTasks$</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li>scala.runtime.AbstractFunction1<scala.collection.immutable.List<<a href="../../../io/gearpump/streaming/task/TaskId.html" title="class in io.gearpump.streaming.task">TaskId</a>>,<a href="../../../io/gearpump/streaming/AppMasterToMaster.StallingTasks.html" title="class in io.gearpump.streaming">AppMasterToMaster.StallingTasks</a>></li>
<li>
<ul class="inheritance">
<li>io.gearpump.streaming.AppMasterToMaster.StallingTasks$</li>
</ul>
</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Implemented Interfaces:</dt>
<dd>java.io.Serializable, scala.Function1<scala.collection.immutable.List<<a href="../../../io/gearpump/streaming/task/TaskId.html" title="class in io.gearpump.streaming.task">TaskId</a>>,<a href="../../../io/gearpump/streaming/AppMasterToMaster.StallingTasks.html" title="class in io.gearpump.streaming">AppMasterToMaster.StallingTasks</a>></dd>
</dl>
<dl>
<dt>Enclosing class:</dt>
<dd><a href="../../../io/gearpump/streaming/AppMasterToMaster.html" title="class in io.gearpump.streaming">AppMasterToMaster</a></dd>
</dl>
<hr>
<br>
<pre>public static class <strong>AppMasterToMaster.StallingTasks$</strong>
extends scala.runtime.AbstractFunction1<scala.collection.immutable.List<<a href="../../../io/gearpump/streaming/task/TaskId.html" title="class in io.gearpump.streaming.task">TaskId</a>>,<a href="../../../io/gearpump/streaming/AppMasterToMaster.StallingTasks.html" title="class in io.gearpump.streaming">AppMasterToMaster.StallingTasks</a>>
implements scala.Serializable</pre>
<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../serialized-form.html#io.gearpump.streaming.AppMasterToMaster.StallingTasks$">Serialized Form</a></dd></dl>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- =========== FIELD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="field_summary">
<!-- -->
</a>
<h3>Field Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Field Summary table, listing fields, and an explanation">
<caption><span>Fields</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Field and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../io/gearpump/streaming/AppMasterToMaster.StallingTasks$.html" title="class in io.gearpump.streaming">AppMasterToMaster.StallingTasks$</a></code></td>
<td class="colLast"><code><strong><a href="../../../io/gearpump/streaming/AppMasterToMaster.StallingTasks$.html#MODULE$">MODULE$</a></strong></code>
<div class="block">Static reference to the singleton instance of this Scala object.</div>
</td>
</tr>
</table>
</li>
</ul>
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor_summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><strong><a href="../../../io/gearpump/streaming/AppMasterToMaster.StallingTasks$.html#AppMasterToMaster.StallingTasks$()">AppMasterToMaster.StallingTasks$</a></strong>()</code> </td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method_summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<ul class="blockList">
<li class="blockList"><a name="methods_inherited_from_class_scala.runtime.AbstractFunction1">
<!-- -->
</a>
<h3>Methods inherited from class scala.runtime.AbstractFunction1</h3>
<code>andThen, apply$mcDD$sp, apply$mcDF$sp, apply$mcDI$sp, apply$mcDJ$sp, apply$mcFD$sp, apply$mcFF$sp, apply$mcFI$sp, apply$mcFJ$sp, apply$mcID$sp, apply$mcIF$sp, apply$mcII$sp, apply$mcIJ$sp, apply$mcJD$sp, apply$mcJF$sp, apply$mcJI$sp, apply$mcJJ$sp, apply$mcVD$sp, apply$mcVF$sp, apply$mcVI$sp, apply$mcVJ$sp, apply$mcZD$sp, apply$mcZF$sp, apply$mcZI$sp, apply$mcZJ$sp, compose, toString</code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods_inherited_from_class_java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait</code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods_inherited_from_class_scala.Function1">
<!-- -->
</a>
<h3>Methods inherited from interface scala.Function1</h3>
<code>apply</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ FIELD DETAIL =========== -->
<ul class="blockList">
<li class="blockList"><a name="field_detail">
<!-- -->
</a>
<h3>Field Detail</h3>
<a name="MODULE$">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>MODULE$</h4>
<pre>public static final <a href="../../../io/gearpump/streaming/AppMasterToMaster.StallingTasks$.html" title="class in io.gearpump.streaming">AppMasterToMaster.StallingTasks$</a> MODULE$</pre>
<div class="block">Static reference to the singleton instance of this Scala object.</div>
</li>
</ul>
</li>
</ul>
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor_detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="AppMasterToMaster.StallingTasks$()">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>AppMasterToMaster.StallingTasks$</h4>
<pre>public AppMasterToMaster.StallingTasks$()</pre>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../io/gearpump/streaming/AppMasterToMaster.StallingTasks.html" title="class in io.gearpump.streaming"><span class="strong">PREV CLASS</span></a></li>
<li><a href="../../../io/gearpump/streaming/AppMasterToMaster$.html" title="class in io.gearpump.streaming"><span class="strong">NEXT CLASS</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?io/gearpump/streaming/AppMasterToMaster.StallingTasks$.html" target="_top">FRAMES</a></li>
<li><a href="AppMasterToMaster.StallingTasks$.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>SUMMARY: </li>
<li>NESTED | </li>
<li><a href="#field_summary">FIELD</a> | </li>
<li><a href="#constructor_summary">CONSTR</a> | </li>
<li><a href="#methods_inherited_from_class_scala.runtime.AbstractFunction1">METHOD</a></li>
</ul>
<ul class="subNavList">
<li>DETAIL: </li>
<li><a href="#field_detail">FIELD</a> | </li>
<li><a href="#constructor_detail">CONSTR</a> | </li>
<li>METHOD</li>
</ul>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| stanleyxu2005/gearpump.github.io | releases/0.7.5/api/java/io/gearpump/streaming/AppMasterToMaster.StallingTasks$.html | HTML | apache-2.0 | 11,488 | 38.07483 | 404 | 0.665042 | false |
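The generated Javadoc above documents MODULE$ as the static reference to the singleton instance of this Scala object. A minimal sketch of how Java code would reach that singleton, assuming only the MODULE$ field and class name shown in the Javadoc (the wrapper class and the println call are illustrative, not part of gearpump):

// Hypothetical sketch (not from the gearpump sources): reaching the documented singleton from Java.
// Only the MODULE$ field and the nested class name come from the Javadoc above; everything else
// here is an assumption for illustration.
import io.gearpump.streaming.AppMasterToMaster;

public class StallingTasksSingletonSketch {
    public static void main(String[] args) {
        // Scala compiles "object StallingTasks" to a class whose single instance
        // is exposed through the public static final MODULE$ field documented above.
        AppMasterToMaster.StallingTasks$ stallingTasks = AppMasterToMaster.StallingTasks$.MODULE$;
        System.out.println(stallingTasks);
    }
}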
/**
 * Copyright (C) 2002 Michel Ishizuka All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that you agree to the following
 * conditions:
 *
 * 1. Redistributions of source code must retain the copyright notice,
 *    this list of conditions and the disclaimer below.
 *
 * 2. Redistributions in binary form must reproduce the copyright notice,
 *    this list of conditions and the disclaimer below in the manual or
 *    other materials included with the distribution.
 *
 * This software is provided by Michel Ishizuka without warranty of any
 * kind; no warranty is made, express or implied, including but not limited
 * to warranties of fitness for a particular purpose or merchantability.
 * Michel Ishizuka shall not be liable for any direct, indirect, incidental,
 * special, exemplary or consequential damages arising from the use of this
 * software (including, but by no means limited to, loss of data, business
 * interruption, loss of anticipated profits, or the cost of procuring
 * substitute products or services), however caused, under any theory of
 * liability including contractual or strict liability, even if such damage
 * resulted from wrongful conduct or the possibility of such damage had been
 * reported.
 */
package com.orangesignal.jlha;
/**
 * The hash function used by the prototype program ar940528 and by LHa for Unix.<br>
 * It appears to have been modeled on the one used by gzip.<br>
 *
 * <pre>
 * -- revision history --
 * $Log: HashDefault.java,v $
 * Revision 1.0  2002/08/05 00:00:00  dangan
 * add to version control
 * [change]
 *     Interface changed to follow the change of the HashMethod interface.
 *     Argument checking removed from the constructor.
 * [maintenance]
 *     Source cleanup.
 *     Tabs removed.
 *     License text revised.
 *
 * </pre>
 *
 * @author $Author: dangan $
 * @version $Revision: 1.0 $
 */
public class HashDefault implements HashMethod {
	/**
	 * Buffer on which LZSS compression is performed. The first half is the dictionary area;
	 * the second half holds the data to be compressed. Within this HashMethod implementation
	 * it is used only for reading when hash values are generated.
	 */
private byte[] textBuffer;
// ------------------------------------------------------------------
// Constructor
	/**
	 * Constructs the hash function used by ar940528 and by LHa for Unix.
	 *
	 * @param textBuffer buffer for LZSS compression; used read-only to generate hash values.
	 */
public HashDefault(final byte[] textBuffer) {
this.textBuffer = textBuffer;
}
// ------------------------------------------------------------------
// method of jp.gr.java_conf.dangan.util.lha.HashMethod
	/**
	 * Mask value used when generating hash values.
	 */
private static final int HASH_MASK = 0x7FFF;
	/**
	 * Hash function.<br>
	 * Generates the hash value of the data pattern starting at position in the TextBuffer passed to the constructor.
	 *
	 * @param position start position of the data pattern
	 * @return the hash value
	 */
@Override
public int hash(final int position) {
return ((textBuffer[position] << 5 ^ textBuffer[position + 1] & 0xFF) << 5 ^ textBuffer[position + 2] & 0xFF) & HASH_MASK;
}
	/**
	 * Returns the number of bytes the hash function reads to generate a hash value.<br>
	 * Because this hash function builds the hash value from 3 bytes of data using shifts and XORs, this method always returns 3.
	 *
	 * @return always 3
	 */
@Override
public int hashRequires() {
return 3;
}
	/**
	 * Returns the size of the hash table.<br>
	 * Because this hash function generates hash values in the range 0x0000 to 0x7FFF, this method always returns 0x8000 (32768).
	 *
	 * @return always 0x8000 (32768)
	 */
@Override
public int tableSize() {
return 0x8000;
}
}
| Koichi-Kobayashi/orangesignal-csv | src/main/java/com/orangesignal/jlha/HashDefault.java | Java | apache-2.0 | 4,530 | 21.910714 | 124 | 0.633034 | false |
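The Javadoc above fully describes HashDefault: hash(position) mixes three bytes with shifts and XORs and masks the result with 0x7FFF, hashRequires() always returns 3, and tableSize() always returns 0x8000. A minimal usage sketch under those assumptions (the demo class and the sample buffer are illustrative, not part of the jlha sources):

// Hypothetical demo (not part of the jlha sources): exercising HashDefault on a toy buffer.
import com.orangesignal.jlha.HashDefault;

public class HashDefaultSketch {
    public static void main(String[] args) {
        // Real callers pass the LZSS compressor's text buffer; a short ASCII buffer suffices here.
        byte[] text = "abcabcabc".getBytes();
        HashDefault hash = new HashDefault(text);

        int required = hash.hashRequires(); // always 3: the function reads 3 bytes per pattern
        int tableSize = hash.tableSize();   // always 0x8000: hash values fall in 0x0000..0x7FFF

        // hash(0) computes (((text[0] << 5) ^ (text[1] & 0xFF)) << 5 ^ (text[2] & 0xFF)) & 0x7FFF.
        // Positions 0 and 3 start the same 3-byte pattern "abc", so they hash to the same bucket.
        System.out.println("requires=" + required + " tableSize=" + tableSize);
        System.out.println("hash(0)=" + hash.hash(0) + " hash(3)=" + hash.hash(3));
    }
}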
import transformCss from '..'
it('transforms border color with multiple values', () => {
expect(transformCss([['border-color', 'red yellow green blue']])).toEqual({
borderTopColor: 'red',
borderRightColor: 'yellow',
borderBottomColor: 'green',
borderLeftColor: 'blue',
})
})
it('transforms border color with hex color', () => {
expect(transformCss([['border-color', '#f00']])).toEqual({
borderBottomColor: '#f00',
borderLeftColor: '#f00',
borderRightColor: '#f00',
borderTopColor: '#f00',
})
})
it('transforms border color with rgb color', () => {
expect(transformCss([['border-color', 'rgb(255, 0, 0)']])).toEqual({
borderBottomColor: 'rgb(255, 0, 0)',
borderLeftColor: 'rgb(255, 0, 0)',
borderRightColor: 'rgb(255, 0, 0)',
borderTopColor: 'rgb(255, 0, 0)',
})
})
it('transforms border color with rgba color', () => {
expect(transformCss([['border-color', 'rgba(255, 0, 0, 0.1)']])).toEqual({
borderBottomColor: 'rgba(255, 0, 0, 0.1)',
borderLeftColor: 'rgba(255, 0, 0, 0.1)',
borderRightColor: 'rgba(255, 0, 0, 0.1)',
borderTopColor: 'rgba(255, 0, 0, 0.1)',
})
})
| BigBoss424/portfolio | v8/development/node_modules/css-to-react-native/src/__tests__/borderColor.js | JavaScript | apache-2.0 | 1,150 | 30.081081 | 77 | 0.614783 | false |