code (stringlengths 0–30.8k) | source (stringclasses 6 values) | language (stringclasses 9 values) | __index_level_0__ (int64 0–100k)
---|---|---|---|
public override void SetPosition(Vector3 newPosition)
{
float timeStamp = Grid.Instance.GetHitTime(newPosition);
HitObjectManager.EditFruitTimeStamp(this, Mathf.RoundToInt(timeStamp));
newPosition.x = Mathf.Clamp(newPosition.x, 0, Grid.Instance.width);
SetXPosition(newPosition.x);
transform.SetGlobalPivot(newPosition);
transform.position += Grid.Instance.transform.position;
} | function | c# | 600 |
func readRecentBuilds() {
n, err := treeSize()
if err != nil {
log.Fatalf("getting sum tree size: %v", err)
}
first := int64(0)
if n > 1000 {
first = n - 1000
n = 1000
}
if n == 0 {
return
}
records, err := serverOps{}.ReadRecords(context.Background(), first, n)
if err != nil {
log.Fatalf("reading records: %v", err)
}
links := []string{}
keepFrom := len(records) - 10
if keepFrom < 0 {
keepFrom = 0
}
for i, record := range records {
br, err := parseRecord(record)
if err != nil {
log.Fatalf("bad record: %v", err)
}
targets.use[br.Goos+"/"+br.Goarch]++
if i < keepFrom {
continue
}
link := request{br.buildSpec, br.Sum, pageIndex}.link()
links = append(links, link)
}
targets.sort()
recentBuilds.links = links
} | function | go | 601 |
def main(args):
global prefix_dreams
global primitives
visualizeCheckpoint = args.pop("visualize")
if visualizeCheckpoint is not None:
with open(visualizeCheckpoint,'rb') as handle:
primitives = pickle.load(handle).grammars[-1].primitives
visualizePrimitives(primitives)
sys.exit(0)
dreamCheckpoint = args.pop("dreamCheckpoint")
dreamDirectory = args.pop("dreamDirectory")
proto = args.pop("proto")
if dreamCheckpoint is not None:
enumerateDreams(dreamCheckpoint, dreamDirectory)
sys.exit(0)
animateCheckpoint = args.pop("animate")
if animateCheckpoint is not None:
animateSolutions(loadPickle(animateCheckpoint).allFrontiers)
sys.exit(0)
target = args.pop("target")
red = args.pop("reduce")
save = args.pop("save")
prefix = args.pop("prefix")
prefix_dreams = prefix + "/dreams/" + ('_'.join(target)) + "/"
prefix_pickles = prefix + "/logo." + ('.'.join(target))
if not os.path.exists(prefix_dreams):
os.makedirs(prefix_dreams)
tasks = makeTasks(target, proto)
eprint("Generated", len(tasks), "tasks")
costMatters = args.pop("cost")
for t in tasks:
t.specialTask[1]["costMatters"] = costMatters
if costMatters: t.examples = [(([1]), t.examples[0][1])]
os.chdir("prototypical-networks")
subprocess.Popen(["python","./protonet_server.py"])
time.sleep(3)
os.chdir("..")
test, train = testTrainSplit(tasks, args.pop("split"))
eprint("Split tasks into %d/%d test/train" % (len(test), len(train)))
try:
if test: montageTasks(test,"test_")
montageTasks(train,"train_")
except:
eprint("WARNING: couldn't generate montage. Do you have an old version of scipy?")
if red:
for reducing in red:
try:
with open(reducing, 'r') as f:
prods = json.load(f)
for e in prods:
e = Program.parse(e)
if e.isInvented:
primitives.append(e)
except EOFError:
eprint("Couldn't grab frontier from " + reducing)
except IOError:
eprint("Couldn't grab frontier from " + reducing)
except json.decoder.JSONDecodeError:
eprint("Couldn't grab frontier from " + reducing)
primitives = list(OrderedDict((x, True) for x in primitives).keys())
baseGrammar = Grammar.uniform(primitives, continuationType=turtle)
eprint(baseGrammar)
timestamp = datetime.datetime.now().isoformat()
outputDirectory = "experimentOutputs/logo/%s"%timestamp
os.system("mkdir -p %s"%outputDirectory)
generator = ecIterator(baseGrammar, train,
testingTasks=test,
outputPrefix="%s/logo"%outputDirectory,
evaluationTimeout=0.01,
**args)
r = None
for result in generator:
iteration = len(result.learningCurve)
dreamDirectory = "%s/dreams_%d"%(outputDirectory, iteration)
os.system("mkdir -p %s"%dreamDirectory)
eprint("Dreaming into directory",dreamDirectory)
dreamFromGrammar(result.grammars[-1],
dreamDirectory)
r = result
needsExport = [str(z)
for _, _, z
in r.grammars[-1].productions
if z.isInvented]
if save is not None:
with open(save, 'w') as f:
json.dump(needsExport, f) | function | python | 602 |
def strain_vorticity_fftsqnorm_grid(box_size, centre, Ncases, time, dt,
w_traj, u_traj):
ugrid = displacement_grid(box_size, centre, Ncases, time, dt,
w_traj, u_traj)
FFTugrid = np.fft.fft2(ugrid, axes=(0, 1))
wave_vectors = wave_vectors_2D(Ncases, Ncases, d=box_size/Ncases)
eikxm1 = np.exp(1j*wave_vectors[:, :, 0]) - 1
eikym1 = np.exp(1j*wave_vectors[:, :, 1]) - 1
FFTsgrid = (eikxm1*FFTugrid[:, :, 1] + eikym1*FFTugrid[:, :, 0])/2
FFTcgrid = eikxm1*FFTugrid[:, :, 1] - eikym1*FFTugrid[:, :, 0]
return np.conj(FFTsgrid)*FFTsgrid, np.conj(FFTcgrid)*FFTcgrid | function | python | 603 |
def integrate_step(self, t, x, u):
cnext = x[:3] + self.dt * x[3:6]
vnext = x[3:6] + (self.dt/self.mass) * (self.active_contacts[t]*u[:3] - self.weight)
qnext = self.integrate_quaternion(x[6:10], self.dt * x[10:13])
R = self.quaternion_to_rotation(x[6:10])
factor = linalg.cho_factor(R.dot(self.inertia_com_frame).dot(R.T))
wnext = x[10:13] + self.dt * linalg.cho_solve(factor, self.active_contacts[t]*u[3:]-np.cross(x[10:13],
np.dot(R.dot(self.inertia_com_frame).dot(R.T),x[10:13])))
return np.hstack([cnext, vnext, qnext, wnext]) | function | python | 604 |
private static Type ResolveType(string assemblyName, TypeNameParser typeName, ICollection<Assembly> assemblies)
{
var type = ResolveNonGenericType(assemblyName, typeName.GetFullName(), assemblies);
if (type == null)
{
return null;
}
if (type.IsGenericTypeDefinition && typeName.Generics != null)
{
var genArgs = typeName.Generics.Select(x => ResolveType(assemblyName, x, assemblies)).ToArray();
return type.MakeGenericType(genArgs);
}
return type;
} | function | c# | 605 |
class DecompIO:
"""
Create a ``stream`` wrapper which allows transparent decompression of
compressed data in another *stream*. This allows processing of compressed
streams with data larger than the available heap size. In addition to the
values described in :func:`decompress`, *wbits* may take values
24..31 (16 + 8..15), meaning that the input stream has a gzip header.
"""
def read(self) -> int:
pass
def readinto(self):
pass
def readline(self):
pass | class | python | 606 |
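A minimal usage sketch of the DecompIO wrapper described above, assuming MicroPython's uzlib module exposes DecompIO(stream, wbits), that the wrapper supports sized reads like a regular stream, and that the file name and process() handler are hypothetical placeholders:
import uzlib

with open("log.gz", "rb") as f:          # compressed input stream
    d = uzlib.DecompIO(f, 16 + 15)       # wbits in 24..31 -> expect a gzip header
    while True:
        chunk = d.read(64)               # decompress incrementally, not all at once
        if not chunk:
            break
        process(chunk)                   # process() is a placeholder for your own handler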
class MockChrome {
public:
MockChrome(base::win::ScopedHandle request_read_handle,
base::win::ScopedHandle response_write_handle)
: request_read_handle_(std::move(request_read_handle)),
response_write_handle_(std::move(response_write_handle)) {}
void CancelAllOperations() {
::CancelIoEx(request_read_handle_.Get(), nullptr);
::CancelIoEx(response_write_handle_.Get(), nullptr);
}
template <typename ValueType>
bool ReadValue(ValueType* value) {
uint32_t read_size = sizeof(*value);
DWORD bytes_read = 0;
bool success = ::ReadFile(request_read_handle_.Get(), value, read_size,
&bytes_read, nullptr);
if (!success) {
PLOG(ERROR) << "Could not read value.";
return false;
}
if (bytes_read != read_size) {
LOG(ERROR) << "Read the wrong number of bytes: " << bytes_read
<< ". Should have been: " << read_size;
return false;
}
return true;
}
bool ReadRequest(uint32_t request_length,
chrome_cleaner::ChromePromptRequest* request) {
DCHECK(request_read_handle_.IsValid());
DWORD bytes_read = 0;
std::string request_content;
bool success =
::ReadFile(request_read_handle_.Get(),
base::WriteInto(&request_content, request_length + 1),
request_length, &bytes_read, nullptr);
if (!success) {
PLOG(ERROR) << "Could not read request.";
return false;
}
if (bytes_read != request_length) {
LOG(ERROR) << "Read the wrong number of bytes: " << bytes_read
<< ". Should have been: " << request_length;
return false;
}
if (!request->ParseFromString(request_content)) {
LOG(ERROR) << "Could not parse request.";
return false;
}
return true;
}
template <typename T>
bool WriteByValue(T value) {
DWORD bytes_written = 0;
bool success = ::WriteFile(response_write_handle_.Get(), &value,
sizeof(value), &bytes_written, nullptr);
if (!success) {
PLOG(ERROR) << "Could not write to pipe.";
return false;
}
if (bytes_written != sizeof(value)) {
LOG(ERROR) << "Wrote the wrong number of bytes";
return false;
}
return true;
}
template <typename T>
bool WriteByPointer(const T* ptr, uint32_t size, bool should_succeed = true) {
DWORD bytes_written = 0;
bool success = ::WriteFile(response_write_handle_.Get(), ptr, size,
&bytes_written, nullptr);
if (should_succeed && !success) {
PLOG(ERROR) << "Could not write to pipe.";
return false;
}
if (should_succeed) {
if (bytes_written != size) {
LOG(ERROR) << "Wrote the wrong number of bytes";
return false;
}
}
return true;
}
bool SendMessage(google::protobuf::MessageLite& message) {
std::string message_content;
if (!message.SerializeToString(&message_content)) {
LOG(ERROR) << "Could not serialize message for sending";
return false;
}
uint32_t message_size = message_content.size();
if (!WriteByValue(message_size)) {
return false;
}
if (!WriteByPointer(message_content.data(), message_content.size())) {
return false;
}
return true;
}
bool SendResponse(PromptUserResponse::PromptAcceptance prompt_acceptance) {
DCHECK(response_write_handle_.IsValid());
PromptUserResponse response;
response.set_prompt_acceptance(prompt_acceptance);
return SendMessage(response);
}
private:
base::win::ScopedHandle request_read_handle_;
base::win::ScopedHandle response_write_handle_;
} | class | c++ | 607 |
int del_timer_sync(struct timer_list * timer)
{
int ret = 0;
for (;;) {
unsigned long flags;
int running;
spin_lock_irqsave(&timerlist_lock, flags);
ret += detach_timer(timer);
timer->list.next = timer->list.prev = 0;
running = timer_is_running(timer);
spin_unlock_irqrestore(&timerlist_lock, flags);
if (!running)
break;
timer_synchronize(timer);
}
return ret;
} | function | c | 608 |
public class Record<K, V> {
private final K key;
private final V value;
private final long timestamp;
private final Headers headers;
/**
* The full constructor, specifying all the attributes of the record.
*
* Note: this constructor makes a copy of the headers argument.
* See {@link ProcessorContext#forward(Record)} for
* considerations around mutability of keys, values, and headers.
*
* @param key The key of the record. May be null.
* @param value The value of the record. May be null.
* @param timestamp The timestamp of the record. May not be negative.
* @param headers The headers of the record. May be null, which will cause subsequent calls
* to {@link #headers()} to return a non-null, empty, {@link Headers} collection.
* @throws IllegalArgumentException if the timestamp is negative.
* @see ProcessorContext#forward(Record)
*/
public Record(final K key, final V value, final long timestamp, final Headers headers) {
this.key = key;
this.value = value;
if (timestamp < 0) {
throw new StreamsException(
"Malformed Record",
new IllegalArgumentException("Timestamp may not be negative. Got: " + timestamp)
);
}
this.timestamp = timestamp;
this.headers = new RecordHeaders(headers);
}
/**
* Convenience constructor in case you do not wish to specify any headers.
* Subsequent calls to {@link #headers()} will return a non-null, empty, {@link Headers} collection.
*
* @param key The key of the record. May be null.
* @param value The value of the record. May be null.
* @param timestamp The timestamp of the record. May not be negative.
*
* @throws IllegalArgumentException if the timestamp is negative.
*/
public Record(final K key, final V value, final long timestamp) {
this(key, value, timestamp, null);
}
/**
* The key of the record. May be null.
*/
public K key() {
return key;
}
/**
* The value of the record. May be null.
*/
public V value() {
return value;
}
/**
* The timestamp of the record. Will never be negative.
*/
public long timestamp() {
return timestamp;
}
/**
* The headers of the record. Never null.
*/
public Headers headers() {
return headers;
}
/**
* A convenient way to produce a new record if you only need to change the key.
*
* Copies the attributes of this record with the key replaced.
*
* @param key The key of the result record. May be null.
* @param <NewK> The type of the new record's key.
* @return A new Record instance with all the same attributes (except that the key is replaced).
*/
public <NewK> Record<NewK, V> withKey(final NewK key) {
return new Record<>(key, value, timestamp, headers);
}
/**
* A convenient way to produce a new record if you only need to change the value.
*
* Copies the attributes of this record with the value replaced.
*
* @param value The value of the result record.
* @param <NewV> The type of the new record's value.
* @return A new Record instance with all the same attributes (except that the value is replaced).
*/
public <NewV> Record<K, NewV> withValue(final NewV value) {
return new Record<>(key, value, timestamp, headers);
}
/**
* A convenient way to produce a new record if you only need to change the timestamp.
*
* Copies the attributes of this record with the timestamp replaced.
*
* @param timestamp The timestamp of the result record.
* @return A new Record instance with all the same attributes (except that the timestamp is replaced).
*/
public Record<K, V> withTimestamp(final long timestamp) {
return new Record<>(key, value, timestamp, headers);
}
/**
* A convenient way to produce a new record if you only need to change the headers.
*
* Copies the attributes of this record with the headers replaced.
* Also makes a copy of the provided headers.
*
* See {@link ProcessorContext#forward(Record)} for
* considerations around mutability of keys, values, and headers.
*
* @param headers The headers of the result record.
* @return A new Record instance with all the same attributes (except that the headers are replaced).
*/
public Record<K, V> withHeaders(final Headers headers) {
return new Record<>(key, value, timestamp, headers);
}
@Override
public String toString() {
return "Record{" +
"key=" + key +
", value=" + value +
", timestamp=" + timestamp +
", headers=" + headers +
'}';
}
@Override
public boolean equals(final Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final Record<?, ?> record = (Record<?, ?>) o;
return timestamp == record.timestamp &&
Objects.equals(key, record.key) &&
Objects.equals(value, record.value) &&
Objects.equals(headers, record.headers);
}
@Override
public int hashCode() {
return Objects.hash(key, value, timestamp, headers);
}
} | class | java | 609 |
public class UuidGenerator {
/** Constructor */
protected UuidGenerator() {
super();
}
/**
* Generates and returns a UUID
*
* @return The generated UUID
*/
private static UUID generateId() {
return UUID.randomUUID();
}
/**
* Generates and returns a random UUID that is all lowercase and has enclosing brackets
*
* @return A UUID string that is all lowercase and has enclosing brackets
*/
public static String generateIdAsString() {
return toString(generateId());
}
/**
* Converts the specified UUID into string that is all lowercase and has enclosing brackets
*
* @param uuid The UUID
* @return A UUID string that is all lowercase and has enclosing brackets
*/
public static String toString(final UUID uuid) {
return "{" + uuid.toString().toLowerCase() + "}";
}
/**
* Converts the specified UUID string into a UUID instance
*
* @param uuid The UUID string
* @return The corresponding UUID instance
*/
protected static UUID fromString(final String uuid) {
return UUID.fromString(uuid.replaceAll("[{}]+", ""));
}
/**
* Normalizes the specified UUID string
*
* @param uuid The UUID string
* @return A UUID string that is all lowercase and has enclosing brackets
*/
public static String normalize(final String uuid) {
return toString(fromString(uuid));
}
} | class | java | 610 |
TEST_F(LastDownloadFinderTest, DeleteBeforeResults) {
TestingProfile* profile = CreateProfile(EXTENDED_REPORTING_OPT_IN);
AddDownload(profile, CreateTestDownloadRow(kBinaryFileName));
LastDownloadFinder::Create(GetDownloadDetailsGetter(),
base::Bind(&LastDownloadFinderTest::NeverCalled,
base::Unretained(this))).reset();
FlushHistoryBackend(profile);
} | function | c++ | 611 |
def zero_all_if_any_non_finite(structure):
flat = tf.nest.flatten(structure)
if not flat:
return (structure, tf.constant(0))
flat_bools = [tf.reduce_all(tf.math.is_finite(t)) for t in flat]
all_finite = functools.reduce(tf.logical_and, flat_bools)
if all_finite:
return (structure, tf.constant(0))
else:
return (tf.nest.map_structure(tf.zeros_like, structure), tf.constant(1)) | function | python | 612 |
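A small, hedged usage sketch of zero_all_if_any_non_finite, assuming TensorFlow eager execution (so the Python `if` sees a concrete boolean) and that functools and tf are imported as in the surrounding module:
import functools
import tensorflow as tf

good = (tf.constant([1.0, 2.0]), tf.constant(3.0))
bad = (tf.constant([1.0, float("nan")]), tf.constant(3.0))
print(zero_all_if_any_non_finite(good)[1].numpy())   # 0 -> structure kept as-is
print(zero_all_if_any_non_finite(bad)[1].numpy())    # 1 -> structure zeroed out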
xrap_msg_t *
xrap_msg_recv (void *input)
{
assert (input);
xrap_msg_t *self = xrap_msg_new (0);
zframe_t *frame = NULL;
size_t string_size;
while (true) {
if (zsocket_type (input) == ZMQ_ROUTER) {
zframe_destroy (&self->address);
self->address = zframe_recv (input);
if (!self->address)
goto empty;
if (!zsocket_rcvmore (input))
goto malformed;
}
frame = zframe_recv (input);
if (!frame)
goto empty;
self->needle = zframe_data (frame);
self->ceiling = self->needle + zframe_size (frame);
uint16_t signature;
GET_NUMBER2 (signature);
if (signature == (0xAAA0 | 5))
break;
while (zsocket_rcvmore (input)) {
zframe_destroy (&frame);
frame = zframe_recv (input);
}
zframe_destroy (&frame);
}
GET_NUMBER1 (self->id);
switch (self->id) {
case XRAP_MSG_POST:
free (self->parent);
GET_STRING (self->parent);
free (self->content_type);
GET_STRING (self->content_type);
if (!zsocket_rcvmore (input))
goto malformed;
self->content_body = zframe_recv (input);
break;
case XRAP_MSG_POST_OK:
GET_NUMBER2 (self->status_code);
free (self->location);
GET_STRING (self->location);
free (self->etag);
GET_STRING (self->etag);
GET_NUMBER8 (self->date_modified);
free (self->content_type);
GET_STRING (self->content_type);
if (!zsocket_rcvmore (input))
goto malformed;
self->content_body = zframe_recv (input);
break;
case XRAP_MSG_GET:
free (self->resource);
GET_STRING (self->resource);
GET_NUMBER8 (self->if_modified_since);
free (self->if_none_match);
GET_STRING (self->if_none_match);
free (self->content_type);
GET_STRING (self->content_type);
break;
case XRAP_MSG_GET_OK:
GET_NUMBER2 (self->status_code);
free (self->content_type);
GET_STRING (self->content_type);
if (!zsocket_rcvmore (input))
goto malformed;
self->content_body = zframe_recv (input);
break;
case XRAP_MSG_GET_EMPTY:
GET_NUMBER2 (self->status_code);
break;
case XRAP_MSG_PUT:
free (self->resource);
GET_STRING (self->resource);
GET_NUMBER8 (self->if_unmodified_since);
free (self->if_match);
GET_STRING (self->if_match);
free (self->content_type);
GET_STRING (self->content_type);
if (!zsocket_rcvmore (input))
goto malformed;
self->content_body = zframe_recv (input);
break;
case XRAP_MSG_PUT_OK:
GET_NUMBER2 (self->status_code);
free (self->location);
GET_STRING (self->location);
free (self->etag);
GET_STRING (self->etag);
GET_NUMBER8 (self->date_modified);
break;
case XRAP_MSG_DELETE:
free (self->resource);
GET_STRING (self->resource);
GET_NUMBER8 (self->if_unmodified_since);
free (self->if_match);
GET_STRING (self->if_match);
break;
case XRAP_MSG_DELETE_OK:
GET_NUMBER2 (self->status_code);
break;
case XRAP_MSG_ERROR:
GET_NUMBER2 (self->status_code);
free (self->status_text);
GET_STRING (self->status_text);
break;
default:
goto malformed;
}
zframe_destroy (&frame);
return self;
malformed:
printf ("E: malformed message '%d'\n", self->id);
empty:
zframe_destroy (&frame);
xrap_msg_destroy (&self);
return (NULL);
} | function | c | 613 |
private Float retrieveResolvedParentDeclaredHeight() {
if (parent != null && parent.<UnitValue>getProperty(Property.HEIGHT) != null) {
UnitValue parentHeightUV = getPropertyAsUnitValue(parent, Property.HEIGHT);
if (parentHeightUV.isPointValue()) {
return parentHeightUV.getValue();
} else {
return ((AbstractRenderer) parent).retrieveHeight();
}
} else {
return null;
}
} | function | java | 614 |
VOS_STATUS WDA_shutdown(v_PVOID_t pVosContext, wpt_boolean closeTransport)
{
WDI_Status wdiStatus;
VOS_STATUS status = VOS_STATUS_SUCCESS;
tWDA_CbContext *pWDA = (tWDA_CbContext *)VOS_GET_WDA_CTXT(pVosContext);
if (NULL == pWDA)
{
VOS_TRACE( VOS_MODULE_ID_WDA, VOS_TRACE_LEVEL_ERROR,
"%s: Invoked with invalid pWDA", __func__ );
VOS_ASSERT(0);
return VOS_STATUS_E_FAILURE;
}
if( (WDA_READY_STATE != pWDA->wdaState) &&
(WDA_INIT_STATE != pWDA->wdaState) &&
(WDA_START_STATE != pWDA->wdaState) )
{
VOS_ASSERT(0);
}
if (eDRIVER_TYPE_MFG != pWDA->driverMode)
{
if(VOS_TRUE == pWDA->wdaTimersCreated)
{
wdaDestroyTimers(pWDA);
pWDA->wdaTimersCreated = VOS_FALSE;
}
}
else
{
vos_event_destroy(&pWDA->ftmStopDoneEvent);
}
wdiStatus = WDI_Shutdown(closeTransport);
if (IS_WDI_STATUS_FAILURE(wdiStatus) )
{
VOS_TRACE( VOS_MODULE_ID_WDA, VOS_TRACE_LEVEL_ERROR,
"error in WDA Stop" );
status = VOS_STATUS_E_FAILURE;
}
pWDA->wdaState = WDA_STOP_STATE;
status = vos_event_destroy(&pWDA->txFrameEvent);
if(!VOS_IS_STATUS_SUCCESS(status))
{
VOS_TRACE( VOS_MODULE_ID_WDA, VOS_TRACE_LEVEL_ERROR,
"VOS Event destroy failed - status = %d", status);
status = VOS_STATUS_E_FAILURE;
}
status = vos_event_destroy(&pWDA->suspendDataTxEvent);
if(!VOS_IS_STATUS_SUCCESS(status))
{
VOS_TRACE( VOS_MODULE_ID_WDA, VOS_TRACE_LEVEL_ERROR,
"VOS Event destroy failed - status = %d", status);
status = VOS_STATUS_E_FAILURE;
}
status = vos_event_destroy(&pWDA->waitOnWdiIndicationCallBack);
if(!VOS_IS_STATUS_SUCCESS(status))
{
VOS_TRACE( VOS_MODULE_ID_WDA, VOS_TRACE_LEVEL_ERROR,
"VOS Event destroy failed - status = %d", status);
status = VOS_STATUS_E_FAILURE;
}
status = vos_free_context(pVosContext,VOS_MODULE_ID_WDA,pWDA);
if ( !VOS_IS_STATUS_SUCCESS(status) )
{
VOS_TRACE( VOS_MODULE_ID_WDA, VOS_TRACE_LEVEL_ERROR,
"error in WDA close " );
status = VOS_STATUS_E_FAILURE;
}
return status;
} | function | c | 615 |
private boolean containsInRect(RectF target, float x, float y, int touchPadding) {
mRectF.set(target);
mRectF.offset(getPaddingStart() + mParams.marginStart, getPaddingTop() + mParams.marginTop);
mRectF.set(mRectF.left - touchPadding,
mRectF.top - touchPadding,
mRectF.right + touchPadding,
mRectF.bottom + touchPadding);
return mRectF.contains(x, y);
} | function | java | 616 |
def np_fast_walsh_hadamard(x, axis, normalize=True):
orig_shape = x.shape
assert axis >= 0 and axis < len(orig_shape), (
'For a vector of shape %s, axis must be in [0, %d] but it is %d'
% (orig_shape, len(orig_shape) - 1, axis))
h_dim = orig_shape[axis]
h_dim_exp = int(round(np.log(h_dim) / np.log(2)))
assert h_dim == 2 ** h_dim_exp, (
'hadamard can only be computed over axis with size that is a power of two, but'
' chosen axis %d has size %d' % (axis, h_dim))
working_shape_pre = [int(np.prod(orig_shape[:axis]))]
working_shape_post = [int(np.prod(orig_shape[axis+1:]))]
working_shape_mid = [2] * h_dim_exp
working_shape = working_shape_pre + working_shape_mid + working_shape_post
ret = x.reshape(working_shape)
for ii in range(h_dim_exp):
dim = ii + 1
arrs = np.split(ret, 2, axis=dim)
assert len(arrs) == 2
ret = np.concatenate((arrs[0] + arrs[1], arrs[0] - arrs[1]), axis=dim)
if normalize:
ret = ret / np.sqrt(float(h_dim))
ret = ret.reshape(orig_shape)
return ret | function | python | 617 |
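A quick sanity check for np_fast_walsh_hadamard: with normalize=True the transform is orthogonal and involutive, so applying it twice along the same power-of-two axis should reproduce the input (assumes NumPy is imported as np, as in the function itself):
import numpy as np

x = np.random.randn(4, 8)                       # axis 1 has size 2**3
h = np_fast_walsh_hadamard(x, axis=1)
x_back = np_fast_walsh_hadamard(h, axis=1)
assert np.allclose(x, x_back)                   # round trip recovers x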
def from_dat_file(cls, file_path):
with open(file_path) as f:
grid_string = f.readlines()
grouped_grids = []
for line in grid_string:
coordinate_list = ast.literal_eval(line)
grouped_grids.append(coordinate_list)
return cls(grid=grouped_grids[0]) | function | python | 618 |
public class RMIManipulatorProxy extends UnicastRemoteObject implements RemoteManipulator {
private static final long serialVersionUID = 25681338871L;
/**
* The class used for the manipulator backend. If this is set it will be
* written to {@link #CONF_MANIPULATOR_CLASS} before submitting
* the configuration to {@link ManipulatorFactory#createManipulator}.
* <p/>
* If this property is not set the proxy will fall back to
* {@link #DEFAULT_BACKEND}.
*/
public static final String CONF_BACKEND = "summa.index.rmi.backend";
/**
* Default class for the manipulator backend implementation as defined
* in the {@link #CONF_BACKEND} property.
*/
public static final Class<? extends IndexManipulator> DEFAULT_BACKEND = IndexControllerImpl.class;
/**
* Configuration property specifying which port the registry used by
* the indexer can be found on. Default value is
* {@link #DEFAULT_REGISTRY_PORT}.
*/
public static final String CONF_REGISTRY_PORT = "summa.index.rmi.registry.port";
/**
* Default value for the {@link #CONF_REGISTRY_PORT} property.
*/
public static final int DEFAULT_REGISTRY_PORT = 28000;
/**
* Configuration property specifying the service name of the indexer
* service. Default is {@link #DEFAULT_SERVICE_NAME}.
*/
public static final String CONF_SERVICE_NAME = "summa.index.rmi.service.name";
/**
* Default value for the {@link #CONF_SERVICE_NAME} property.
*/
public static final String DEFAULT_SERVICE_NAME = "summa-indexer";
/**
* The port RMI communications should run over. The default value for this
* property is {@link #DEFAULT_SERVICE_PORT}.
*/
public static final String CONF_SERVICE_PORT = "summa.index.rmi.servic.eport";
/**
* Default value for the {@link #CONF_SERVICE_PORT} property. Using port
* 0 means that a random anonymous port will be used for communications.
*/
public static final int DEFAULT_SERVICE_PORT = 0;
private static final Log log = LogFactory.getLog(RMIManipulatorProxy.class);
private IndexManipulator backend;
private String serviceName;
private int registryPort;
public RMIManipulatorProxy(Configuration conf) throws IOException {
super(getServicePort(conf));
/* Create configuration for the backend, based on our own,
* rewriting the class property if necessary */
// FIXME: The below config should really be kept entirely in memory,
// but we can't use a memorybased config because of bug:
// https://gforge.statsbiblioteket.dk/tracker/index.php?func=detail&aid=1453&group_id=8&atid=109
Configuration backendConf = new Configuration(new XStorage(false));
backendConf.importConfiguration(conf);
if (conf.valueExists(CONF_BACKEND)) {
backendConf.set(CONF_MANIPULATOR_CLASS, conf.getString(CONF_BACKEND));
} else {
log.info(CONF_BACKEND + " not set, using " + DEFAULT_BACKEND + " for backend");
backendConf.set(CONF_MANIPULATOR_CLASS, DEFAULT_BACKEND);
}
/* If the backend is set to be another RMIManipulatorProxy then avoid
* infinite recursion */
if (backendConf.valueExists(CONF_MANIPULATOR_CLASS)) {
if (this.getClass().getName().equals(backendConf.getString(CONF_MANIPULATOR_CLASS))) {
throw new ConfigurationException("Nested RMIManipulatorProxy objects not allowed");
}
}
if (log.isTraceEnabled()) {
log.trace("Backend conf:\n" + backendConf.dumpString());
}
log.trace("Creating manipulator backend");
backend = ManipulatorFactory.createManipulator(backendConf);
log.trace("Created manipulator: " + backend.getClass().getName());
serviceName = conf.getString(CONF_SERVICE_NAME, DEFAULT_SERVICE_NAME);
registryPort = conf.getInt(CONF_REGISTRY_PORT, DEFAULT_REGISTRY_PORT);
RemoteHelper.exportRemoteInterface(this, registryPort, serviceName);
try {
RemoteHelper.exportMBean(this);
} catch (Exception e) {
String msg = "Error exporting MBean of '" + this + "'. Going on without it: " + e.getMessage();
if (log.isTraceEnabled()) {
log.warn(msg, e);
} else {
log.warn(msg);
}
}
}
private static int getServicePort(Configuration conf) {
try {
return conf.getInt(CONF_SERVICE_PORT);
} catch (NullPointerException e) {
log.warn("Service port not defined in " + CONF_SERVICE_PORT + ". Falling back to anonymous port");
return DEFAULT_SERVICE_PORT;
}
}
@Override
public void open(File indexRoot) throws RemoteException {
try {
backend.open(indexRoot);
} catch (Throwable t) {
RemoteHelper.exitOnThrowable(log, String.format("open(%s) for %d:%s", indexRoot, registryPort,
serviceName), t);
}
}
@Override
public void clear() throws RemoteException {
try {
backend.clear();
} catch (Throwable t) {
RemoteHelper.exitOnThrowable(log, String.format("clear() for %d:%s", registryPort, serviceName), t);
}
}
@Override
public boolean update(Payload payload) throws RemoteException {
try {
return backend.update(payload);
} catch (Throwable t) {
RemoteHelper.exitOnThrowable(log, String.format("update(%s) for %d:%s", payload, registryPort,
serviceName), t);
return false; // exitOnThrowable always throws
}
}
@Override
public void commit() throws RemoteException {
try {
backend.commit();
} catch (Throwable t) {
RemoteHelper.exitOnThrowable(log, String.format("commit() for %d:%s", registryPort, serviceName), t);
}
}
@Override
public void consolidate() throws RemoteException {
try {
backend.consolidate();
} catch (Throwable t) {
RemoteHelper.exitOnThrowable(log, String.format("consolidate() for %d:%s", registryPort, serviceName), t);
}
}
@Override
public void close() throws RemoteException {
try {
backend.close();
} catch (Throwable t) {
RemoteHelper.exitOnThrowable(log, String.format("close() for %d:%s", registryPort, serviceName), t);
}
}
@Override
public void orderChangedSinceLastCommit() throws RemoteException {
try {
backend.orderChangedSinceLastCommit();
} catch (Throwable t) {
RemoteHelper.exitOnThrowable(log, String.format("orderChangedSinceLastCommit() for %d:%s",
registryPort, serviceName), t);
}
}
@Override
public boolean isOrderChangedSinceLastCommit() throws RemoteException {
try {
return backend.isOrderChangedSinceLastCommit();
} catch (Throwable t) {
RemoteHelper.exitOnThrowable(log, String.format("isOrderChangedSinceLastCommit() for %d:%s",
registryPort, serviceName), t);
return true; // We bomb out in a few seconds, so the value is random
}
}
} | class | java | 619 |
private static ArraySegment<int> _splitOffWrappingDirectives(
ref ArraySegment<int> rawMapping)
{
var dirCount = rawMapping.Array[rawMapping.Offset + rawMapping.Count - 1];
var actualMapping = new ArraySegment<int>(rawMapping.Array, rawMapping.Offset,
rawMapping.Count - dirCount - 1);
rawMapping = new ArraySegment<int>(rawMapping.Array, actualMapping.Count, dirCount);
return actualMapping;
} | function | c# | 620 |
func TestTxnCoordSenderErrorWithIntent(t *testing.T) {
defer leaktest.AfterTest(t)()
stopper := stop.NewStopper()
defer stopper.Stop(context.TODO())
manual := hlc.NewManualClock(123)
clock := hlc.NewClock(manual.UnixNano, 20*time.Nanosecond)
testCases := []struct {
roachpb.Error
errMsg string
}{
{*roachpb.NewError(roachpb.NewTransactionRetryError(roachpb.RETRY_REASON_UNKNOWN)), "retry txn"},
{
*roachpb.NewError(roachpb.NewTransactionPushError(roachpb.Transaction{
TxnMeta: enginepb.TxnMeta{ID: uuid.MakeV4()}}),
), "failed to push",
},
{*roachpb.NewErrorf("testError"), "testError"},
}
for i, test := range testCases {
t.Run("", func(t *testing.T) {
var senderFn client.SenderFunc = func(_ context.Context, ba roachpb.BatchRequest) (*roachpb.BatchResponse, *roachpb.Error) {
txn := ba.Txn.Clone()
txn.Writing = true
pErr := &roachpb.Error{}
*pErr = test.Error
pErr.SetTxn(&txn)
return nil, pErr
}
factory := NewTxnCoordSenderFactory(
TxnCoordSenderFactoryConfig{
AmbientCtx: log.AmbientContext{Tracer: tracing.NewTracer()},
Clock: clock,
Stopper: stopper,
},
senderFn,
)
var ba roachpb.BatchRequest
key := roachpb.Key("test")
ba.Add(&roachpb.PutRequest{RequestHeader: roachpb.RequestHeader{Key: key}})
ba.Add(&roachpb.EndTransactionRequest{})
txn := roachpb.MakeTransaction("test", key, 0, clock.Now(), 0)
meta := roachpb.MakeTxnCoordMeta(txn)
tc := factory.TransactionalSender(client.RootTxn, meta)
ba.Txn = &txn
_, pErr := tc.Send(context.Background(), ba)
if !testutils.IsPError(pErr, test.errMsg) {
t.Errorf("%d: error did not match %s: %v", i, test.errMsg, pErr)
}
})
}
} | function | go | 621 |
class Parms
{
public:
void clear() {
m.clear();
}
bool getBool(const char* name, bool def = false) const {
std::map<std::string,Variant>::const_iterator i = m.find(name);
if (i == m.end() || (*i).second.type != Variant::BOOL1) return def;
return (*i).second.getBool();
}
int getInt(const char* name, int def = zero) const {
std::map<std::string,Variant>::const_iterator i = m.find(name);
if (i == m.end() || (*i).second.type != Variant::INT1) return def;
return (*i).second.getInt();
}
float getFloat(const char* name, float def = zero) const {
std::map<std::string,Variant>::const_iterator i = m.find(name);
if (i == m.end() || (*i).second.type != Variant::FLOAT1) return def;
return (*i).second.getFloat();
}
Vec2f getVec2f(const char* name, const Vec2f& def = zero) const {
std::map<std::string,Variant>::const_iterator i = m.find(name);
if (i == m.end() || (*i).second.type != Variant::FLOAT2) return def;
return (*i).second.getVec2f();
}
Vec3f getVec3f(const char* name, const Vec3f& def = zero) const {
std::map<std::string,Variant>::const_iterator i = m.find(name);
if (i == m.end() || (*i).second.type != Variant::FLOAT3) return def;
return (*i).second.getVec3f();
}
Vec3fa getVec3fa(const char* name, const Vec3fa& def = zero) const {
std::map<std::string,Variant>::const_iterator i = m.find(name);
if (i == m.end() || (*i).second.type != Variant::FLOAT3) return def;
return (*i).second.getVec3fa();
}
std::string getString(const char* name, std::string def = "") const {
std::map<std::string,Variant>::const_iterator i = m.find(name);
if (i == m.end() || (*i).second.type != Variant::STRING) return def;
return (*i).second.getString();
}
std::shared_ptr<Texture> getTexture(const char* name) const {
std::map<std::string,Variant>::const_iterator i = m.find(name);
if (i == m.end() || (*i).second.type != Variant::TEXTURE) return std::shared_ptr<Texture>();
return (*i).second.getTexture();
}
void add(const std::string& name, Variant data) {
m[name] = data;
}
private:
std::map<std::string,Variant> m;
} | class | c++ | 622 |
protected virtual SqlExpression VisitCollate(
[NotNull] MySqlCollateExpression collateExpression, bool allowOptimizedExpansion, out bool nullable)
{
Check.NotNull(collateExpression, nameof(collateExpression));
var valueExpression = Visit(collateExpression.ValueExpression, allowOptimizedExpansion, out nullable);
return collateExpression.Update(valueExpression);
} | function | c# | 623 |
class SimpleVehicle:
"""Simple longitudinal vehicle model.
The input is the desired acceleration, which is filtered with a low-pass
filter and integrated twice to obtain the actual acceleration, velocity and
position of the vehicle.
"""
def __init__(self, dt=0.1, T=0.5, K=1., a_init=0, v_init=0, x_init=0, allow_v_negative=True, opts={}):
"""Initialization of parameters
Args:
dt: Interval in seconds.
T: Time constant of PT1-filter.
K: Gain of PT1-filter.
a_init: Initial longitudinal acceleration of the vehicle.
v_init: Initial longitudinal velocity of the vehicle.
x_init: Initial longitudinal position of the vehicle.
allow_v_negative: If set to False, the vehicle will not drive backwards.
"""
self.PT1 = PT1(dt=dt, T=T, init=a_init, K=K)
if allow_v_negative:
self.I1 = Integrator(dt=dt, init=v_init)
else:
self.I1 = Integrator(dt=dt, init=v_init, min_value=0)
self.I2 = Integrator(dt=dt, init=x_init)
self.allow_v_negative = allow_v_negative
def step(self, a):
"""Simulates one time step.
Args:
a: Desired acceleration.
Returns:
a: Actual longitudinal acceleration.
v: Actual longitudinal velocity.
x: Actual longitudinal position.
"""
a = self.PT1(a)
v = self.I1(a)
x = self.I2(v)
if not self.allow_v_negative and a < 0 and v <= 0:
a = 0
return a, v, x
def reset(self):
"""Resets all values to initial values.
"""
self.I1.reset()
self.PT1.reset()
self.I2.reset()
def set_v_init(self, v_init):
"""Sets the initial velocity.
Args:
v_init: Initial longitudinal velocity.
"""
self.v_init = v_init
self.I1.init_value = v_init
def set_x_init(self, x_init):
"""Sets the initial position.
Args:
x_init: Initial longitudinal position.
"""
self.x_init = x_init
self.I2.init_value = x_init
def set_v(self, v):
"""Sets the current velocity.
Args:
v: Current longitudinal velocity.
"""
self.I1.value = v
def get_v(self):
"""Gets the current velocity.
Returns:
Current longitudinal velocity.
"""
return self.I1.value
def set_s(self, s):
"""Sets the current position.
Args:
s: Current longitudinal position.
"""
self.I2.value = s
def get_s(self):
"""Gets the current position.
Returns:
Current longitudinal position.
"""
return self.I2.value | class | python | 624 |
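A brief usage sketch of the SimpleVehicle model above, assuming the PT1 and Integrator helpers it depends on are importable from the same module:
vehicle = SimpleVehicle(dt=0.1, T=0.5, K=1.0, v_init=5.0)
for _ in range(50):                       # simulate 5 seconds at dt = 0.1 s
    a, v, x = vehicle.step(1.0)           # request a constant 1 m/s^2 acceleration
print(a, v, x)                            # filtered acceleration, velocity, position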
static gboolean check_dir_empty_functor(const char * filename,
gpointer user_data) {
VfsDevice * self;
char * path_name;
Device *d_self;
self = VFS_DEVICE(user_data);
d_self = DEVICE(self);
if (strcmp(filename, VOLUME_LOCKFILE_NAME) == 0)
return TRUE;
path_name = vstralloc(self->dir_name, "/", filename, NULL);
g_warning(_("Found spurious storage file %s"), path_name);
amfree(path_name);
return TRUE;
} | function | c | 625 |
def df_latency_preemption(self, task):
return self._df_latency(
task,
'preempt_latency',
TaskState.TASK_RUNNING,
TaskState.TASK_ACTIVE
) | function | python | 626 |
start() {
this.activityIframeView_.onMessage(result => {
if (result['alreadySubscribed']) {
this.deps_.callbacks().triggerLoginRequest({
linkRequested: !!result['linkRequested'],
});
return;
}
if (result['sku']) {
new PayStartFlow(
this.deps_,
(result['sku']))
.start();
return;
}
if (result['native']) {
this.deps_.callbacks().triggerSubscribeRequest();
return;
}
});
return this.dialogManager_.openView(this.activityIframeView_);
} | function | javascript | 627 |
function pushCards(state, action, cardPackage) {
const pile_number = action.sequence.to.pile_number;
let pile;
switch (action.sequence.to.pile) {
case ('foundation'):
pile = state.foundation[pile_number];
cardPackage = cardPackage.filter(value => value != null);
state.foundation[pile_number] = pile.concat(cardPackage);
break;
case ('tableau'):
pile = state.tableau[pile_number];
state.tableau[pile_number] = pile.concat(cardPackage);
break;
}
} | function | javascript | 628 |
def __CheckIfPreconditionerTypeIsDeprecated(config):
if config.Has("preconditioner_type"):
preconditioner_type = config["preconditioner_type"].GetString()
old_new_name_map = {
"None" : "none",
"DiagonalPreconditioner" : "diagonal",
"ILU0Preconditioner" : "ilu0",
"ILUPreconditioner" : "ilu"
}
if preconditioner_type in old_new_name_map:
new_name = old_new_name_map[preconditioner_type]
depr_msg = 'DEPRECATION-WARNING: \nUsing a deprecated "preconditioner_type"!\n'
depr_msg += 'Replace "' + preconditioner_type + '" with "' + new_name + '"'
KM.Logger.PrintWarning("Linear-Solver-Factory", depr_msg)
config["preconditioner_type"].SetString(new_name) | function | python | 629 |
func PrintUnicodeRunes(content string) {
if content == "" {
return
}
for _, char := range asciiFilter.FindAllString(content, -1) {
if char == "" {
continue
}
r := []rune(char)
fmt.Printf("%U\n", r)
}
} | function | go | 630 |
public class StreamIdBasedExpression<R> extends UnresolvedStreamIdBasedExpression<R> {
private static final long serialVersionUID = 1L;
private final StreamId<R> streamId;
protected StreamIdBasedExpression(StreamId<R> streamId) {
super(ResolvedExpression.of(streamId));
this.streamId = requireNonNull(streamId, "streamId must not be null.");
}
public static <R> StreamIdBasedExpression<R> of(StreamId<R> streamId) {
return new StreamIdBasedExpression<>(streamId);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((streamId() == null) ? 0 : streamId().hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
StreamIdBasedExpression<?> other = (StreamIdBasedExpression<?>) obj;
if (streamId() == null) {
if (other.streamId() != null) {
return false;
}
} else if (!streamId().equals(other.streamId())) {
return false;
}
return true;
}
@Override
public String toString() {
return "StreamIdBasedExpression [streamId=" + streamId() + "]";
}
public StreamId<R> streamId() {
return streamId;
}
} | class | java | 631 |
function findOutdatedReads(
codePathSegment,
surroundingFunction,
{
stateBySegmentStart = new WeakMap(),
stateBySegmentEnd = new WeakMap()
} = {}
) {
if (!stateBySegmentStart.has(codePathSegment)) {
stateBySegmentStart.set(codePathSegment, new AssignmentTrackerState());
}
const currentState = stateBySegmentStart.get(codePathSegment).copy();
expressionsByCodePathSegment.get(codePathSegment).forEach(({ entering, node }) => {
if (node.type === "AssignmentExpression") {
if (entering) {
currentState.enterAssignment(node);
} else {
currentState.exitAssignment(node);
}
} else if (!entering && (node.type === "AwaitExpression" || node.type === "YieldExpression")) {
currentState.exitAwaitOrYield(node, surroundingFunction);
} else if (!entering && (node.type === "Identifier" || node.type === "MemberExpression")) {
currentState.exitIdentifierOrMemberExpression(node);
}
});
stateBySegmentEnd.set(codePathSegment, currentState);
codePathSegment.nextSegments.forEach(nextSegment => {
if (stateBySegmentStart.has(nextSegment)) {
if (!stateBySegmentStart.get(nextSegment).merge(currentState)) {
return;
}
} else {
stateBySegmentStart.set(nextSegment, currentState.copy());
}
findOutdatedReads(
nextSegment,
surroundingFunction,
{ stateBySegmentStart, stateBySegmentEnd }
);
});
} | function | javascript | 632 |
func (c CoverType) GeneratePath(external bool, iid string) string {
cdir := ".kobo-images"
if external {
cdir = "koboExtStorage/images-cache"
}
dir1, dir2, base := hashedImageParts(iid)
return fmt.Sprintf("%s/%s/%s/%s - %s.parsed", cdir, dir1, dir2, base, c.NickelString())
} | function | go | 633 |
public void drawButton(Graphics g, int offset){
int xx = x + offset;
int yy = y + yOff;
if(MouseInput.MOUSE.intersects(this)){
g.setColor(Color.YELLOW);
}else
g.setColor(Color.WHITE);
if(!MouseInput.pressed && MouseInput.MOUSE.intersects(this))
g.drawRect(x, y, width, height);
else if(MouseInput.pressed && MouseInput.MOUSE.intersects(this))
g.fillRect(x, y, width, height);
else
g.drawRect(x,y,width,height);
g.setColor(Color.RED);
g.drawString(text, xx, yy);
} | function | java | 634 |
function printResults(results) {
document.getElementById("results-label").innerHTML = "<span>Simulation results</span>";
if (results == null) {
var resultElems = document.getElementsByClassName("final-result");
for (var i = 0; i < resultElems.length; i++) {
resultElems[i].innerHTML = "";
}
return;
}
resultRunTime.innerHTML = stepsToTime(results["run_time"]);
if (results["cause"] == "no_more_agents") {
resultCause.innerHTML = "No active agents left";
} else if (results["cause"] == "interrupted") {
resultCause.innerHTML = "Interrupted by the user";
} else {
resultCause.innerHTML = "Time limit reached";
}
resultAgentsEvac.innerHTML = results["total_evac"];
resultAdultsEvac.innerHTML = results["adult_evac"];
resultElderlyEvac.innerHTML = results["elderly_evac"];
resultDisabledEvac.innerHTML = results["disabled_evac"];
resultChildrenEvac.innerHTML = results["children_evac"];
resultAgentsUnconsc.innerHTML = results["total_unconsc"];
resultAdultsUnconsc.innerHTML = results["adult_unconsc"];
resultElderlyUnconsc.innerHTML = results["elderly_unconsc"];
resultDisabledUnconsc.innerHTML = results["disabled_unconsc"];
resultChildrenUnconsc.innerHTML = results["children_unconsc"];
resultAgentsDead.innerHTML = results["total_dead"];
resultAdultsDead.innerHTML = results["adult_dead"];
resultElderlyDead.innerHTML = results["elderly_dead"];
resultDisabledDead.innerHTML = results["disabled_dead"];
resultChildrenDead.innerHTML = results["children_dead"];
resultAgentsActive.innerHTML = results["total_active"];
resultAdultsActive.innerHTML = results["adult_active"];
resultElderlyActive.innerHTML = results["elderly_active"];
resultDisabledActive.innerHTML = results["disabled_active"];
resultChildrenActive.innerHTML = results["children_active"];
initMinTime.innerHTML = stepsToTime(results["min-ideal-evac-time"]);
if (results["avg-ideal-evac-time"] != 0 && results["avg-ideal-evac-time"] != null) {
initAvgTime.innerHTML = stepsToTime(results["avg-ideal-evac-time"]);
} else {
initAvgTime.innerHTML = "-";
}
if (results["max-ideal-evac-time"] != 0 && results["max-ideal-evac-time"] != null) {
initMaxTime.innerHTML = stepsToTime(results["max-ideal-evac-time"]);
} else {
initMaxTime.innerHTML = "-";
}
if (results["cause"] != "no_more_agents") {
resultAvgTime.innerHTML = "> ";
resultMaxTime.innerHTML = "> ";
} else {
resultAvgTime.innerHTML = "";
resultMaxTime.innerHTML = "";
}
if (results["min-evac-time"] != 0 && results["min-evac-time"] != null) {
if (results["total_evac"] == 0) {
resultMinTime.innerHTML = "> ";
} else {
resultMinTime.innerHTML = "";
}
resultMinTime.innerHTML += stepsToTime(results["min-evac-time"]);
} else {
resultMinTime.innerHTML = "-";
}
if (results["avg-evac-time"] != 0 && results["avg-evac-time"] != null) {
resultAvgTime.innerHTML += stepsToTime(results["avg-evac-time"]);
} else {
resultAvgTime.innerHTML = "-";
}
if (results["max-evac-time"] != 0 && results["max-evac-time"] != null) {
resultMaxTime.innerHTML += stepsToTime(results["max-evac-time"]);
} else {
resultMaxTime.innerHTML = "-";
}
} | function | javascript | 635 |
public static void Forget(this Task task, string errorMessage = "")
{
task.ContinueWith(t =>
{
if (t.IsFaulted)
{
log.Error(t.Exception, errorMessage);
}
});
} | function | c# | 636 |
def replicate_cg_with_dst_resource_provisioning(self,
max_time_out_of_sync,
source_luns,
dst_pool_id,
dst_cg_name=None,
remote_system=None):
dst_resource = UnityResourceConfig.to_embedded(
name=dst_cg_name)
dst_element_configs = []
for source_lun in source_luns:
lun_resource_config = UnityResourceConfig.to_embedded(
pool_id=dst_pool_id,
is_thin_enabled=source_lun.is_thin_enabled,
size=source_lun.size_total, request_id=source_lun.id,
name=source_lun.name,
is_deduplication_enabled=source_lun.is_data_reduction_enabled,
is_compression_enabled=source_lun.is_data_reduction_enabled)
dst_element_configs.append(lun_resource_config)
result = UnityReplicationSession.create_with_dst_resource_provisioning(
self._cli, self.get_id(), dst_resource, max_time_out_of_sync,
remote_system=remote_system,
dst_resource_element_configs=dst_element_configs)
return result | function | python | 637 |
public class BloodletterBlock extends Block implements IRitualPropBlock {
public static final BooleanProperty FILLED = BooleanProperty.create("filled");
protected static final VoxelShape SHAPE = VoxelShapeUtils.fromModel(new ResourceLocation(PrimalMagick.MODID, "block/bloodletter"));
public BloodletterBlock() {
super(Block.Properties.create(Material.ROCK, MaterialColor.OBSIDIAN).hardnessAndResistance(1.5F, 6.0F).sound(SoundType.STONE));
this.setDefaultState(this.getDefaultState().with(FILLED, Boolean.FALSE));
}
@Override
protected void fillStateContainer(Builder<Block, BlockState> builder) {
builder.add(FILLED);
}
@Override
public VoxelShape getShape(BlockState state, IBlockReader worldIn, BlockPos pos, ISelectionContext context) {
return SHAPE;
}
@Override
public ActionResultType onBlockActivated(BlockState state, World worldIn, BlockPos pos, PlayerEntity player, Hand handIn, BlockRayTraceResult hit) {
if (player != null && player.getHeldItem(handIn).isEmpty() && !state.get(FILLED)) {
// If using an empty hand on an unfilled bloodletter, cut the player
if (!worldIn.isRemote) {
player.attackEntityFrom(DamageSourcesPM.BLEEDING, 2.0F);
worldIn.setBlockState(pos, state.with(FILLED, Boolean.TRUE), Constants.BlockFlags.DEFAULT_AND_RERENDER);
// If this block is awaiting activation for an altar, notify it
if (this.isPropOpen(state, worldIn, pos)) {
this.onPropActivated(state, worldIn, pos, this.getUsageStabilityBonus());
}
}
return ActionResultType.SUCCESS;
} else if (player != null && player.getHeldItem(handIn).getItem() == Items.WATER_BUCKET && state.get(FILLED)) {
// If using a water bucket on a filled bloodletter, clean it out
worldIn.playSound(player, pos, SoundEvents.ITEM_BUCKET_EMPTY, SoundCategory.BLOCKS, 1.0F, 1.0F);
if (!worldIn.isRemote) {
if (!player.abilities.isCreativeMode) {
player.setHeldItem(handIn, new ItemStack(Items.BUCKET));
}
worldIn.setBlockState(pos, state.with(FILLED, Boolean.FALSE), Constants.BlockFlags.DEFAULT_AND_RERENDER);
}
return ActionResultType.SUCCESS;
} else {
return ActionResultType.PASS;
}
}
@SuppressWarnings("deprecation")
@Override
public void onReplaced(BlockState state, World worldIn, BlockPos pos, BlockState newState, boolean isMoving) {
// Close out any pending ritual activity if replaced
if (!worldIn.isRemote && state.getBlock() != newState.getBlock()) {
this.closeProp(state, worldIn, pos);
}
super.onReplaced(state, worldIn, pos, newState, isMoving);
}
@OnlyIn(Dist.CLIENT)
@Override
public void animateTick(BlockState stateIn, World worldIn, BlockPos pos, Random rand) {
// Show spell sparkles if receiving salt power
if (this.isBlockSaltPowered(worldIn, pos)) {
FxDispatcher.INSTANCE.spellTrail(pos.getX() + rand.nextDouble(), pos.getY() + rand.nextDouble(), pos.getZ() + rand.nextDouble(), Color.WHITE.getRGB());
}
}
public float getStabilityBonus(World world, BlockPos pos) {
return 0.03F;
}
@Override
public float getSymmetryPenalty(World world, BlockPos pos) {
return 0.03F;
}
@Override
public boolean isPropActivated(BlockState state, World world, BlockPos pos) {
if (state != null && state.getBlock() instanceof BloodletterBlock) {
return state.get(FILLED);
} else {
return false;
}
}
@Override
public String getPropTranslationKey() {
return "primalmagick.ritual.prop.bloodletter";
}
public float getUsageStabilityBonus() {
return 15.0F;
}
@Override
public boolean hasTileEntity(BlockState state) {
return true;
}
@Override
public TileEntity createTileEntity(BlockState state, IBlockReader world) {
return new BloodletterTileEntity();
}
@SuppressWarnings("deprecation")
@Override
public boolean eventReceived(BlockState state, World worldIn, BlockPos pos, int id, int param) {
// Pass any received events on to the tile entity and let it decide what to do with it
super.eventReceived(state, worldIn, pos, id, param);
TileEntity tile = worldIn.getTileEntity(pos);
return (tile == null) ? false : tile.receiveClientEvent(id, param);
}
} | class | java | 638 |
public static void main(String[] args) throws Exception {
boolean swarm = args.length > 1 && PUBSUB_SITE.equals(args[1]);
if (swarm) {
swarmPubber(args);
} else {
singularPubber(args);
}
LOG.info("Done with main");
} | function | java | 639 |
internal bool CheckDigest(XadesSignature.UriResolver resolver)
{
try
{
Stream stream;
if (resolver != null)
stream = resolver(_uri);
else
using (WebClient wc = new WebClient())
stream = wc.OpenRead(_uri);
byte[] digest = XadesUtils.CalculateHash(stream, _transformChain, _digestMethod);
_isValid = XadesUtils.DigestEqual(digest, _reference.DigestValue);
}
catch (Exception)
{
_isValid = false;
}
return _isValid;
} | function | c# | 640 |
public void write(@Nonnull final Writer dataWriter) {
if (sexGenotypeDataList.isEmpty()) {
throw new IllegalStateException("The sex genotype data collection is empty");
}
/* check if extended genotyping information is available; first, check for nulls */
boolean extended = true;
if (sexGenotypeDataList.stream().filter(dat -> !dat.hasExtendedGenotypingInfo()).count() > 0) {
extended = false;
}
Set<String> commonGenotypes = new HashSet<>();
/* check if there is a non-empty intersection */
if (extended) {
/* pool all genotypes */
commonGenotypes = new HashSet<>(sexGenotypeDataList.get(0).getSexGenotypesSet());
for (final SexGenotypeData dat : sexGenotypeDataList) {
commonGenotypes = Sets.intersection(commonGenotypes, dat.getSexGenotypesSet());
}
if (commonGenotypes.isEmpty()) {
extended = false;
}
}
final TableColumnCollection columns;
if (extended) {
final List<String> columnNames = new ArrayList<>();
columnNames.addAll(SexGenotypeTableColumn.MANDATORY_SEX_GENOTYPE_COLUMNS.names());
columnNames.addAll(commonGenotypes);
columns = new TableColumnCollection(columnNames);
} else {
columns = new TableColumnCollection(SexGenotypeTableColumn.MANDATORY_SEX_GENOTYPE_COLUMNS.names());
}
try (final SexGenotypeTableWriter writer = new SexGenotypeTableWriter(dataWriter, columns)) {
writer.writeAllRecords(sexGenotypeDataList);
} catch (final IOException e) {
throw new UserException.CouldNotCreateOutputFile("Could not write sex genotype data", e);
}
} | function | java | 641 |
public static string display()
{
string[] stringPuzzle = new string[81];
int count = 0;
foreach (var cell in cells)
{
if (count % 9 != 0)
{
stringPuzzle[count] = cell.value.ToString() + " ";
}
else
{
stringPuzzle[count] = "\n" + cell.value.ToString() + " ";
}
count++;
}
string answer = string.Join("", stringPuzzle);
return answer;
} | function | c# | 642 |
private static byte[] HashBlock(KeyedHashAlgorithm Hasher, byte[] Salt, int BlockIndex, int IterationCount)
{
byte[] Data = Hasher.ComputeHash(GetFirstBlockData(Salt, BlockIndex));
byte[] Result = (byte[])Data.Clone();
for (var i = 2; i <= IterationCount; i++)
{
byte[] Temp = Hasher.ComputeHash(Data);
for (var j = 0; j < Temp.Length; j++)
{
Result[j] ^= Temp[j];
}
Data = Temp;
}
return Result;
} | function | c# | 643 |
func (dc *dcWrap) update(node *core.IpfsNode) []error {
var res []error
var (
m runtime.MemStats
ns *nodepb.Node_Settings
)
runtime.ReadMemStats(&m)
ctx, cancel := context.WithTimeout(context.Background(), updateTimeout)
defer cancel()
ns, err := helper.GetHostStorageConfig(ctx, node)
if err != nil {
res = append(res, fmt.Errorf("failed to get node storage config: %s", err.Error()))
} else {
dc.pn.StoragePriceAsk = ns.StoragePriceAsk
dc.pn.StoragePriceDefault = ns.StoragePriceDefault
dc.pn.CustomizedPricing = ns.CustomizedPricing
dc.pn.BandwidthPriceAsk = ns.BandwidthPriceAsk
dc.pn.StorageTimeMin = ns.StorageTimeMin
dc.pn.BandwidthLimit = ns.BandwidthLimit
dc.pn.CollateralStake = ns.CollateralStake
dc.pn.RepairPriceDefault = ns.RepairPriceDefault
dc.pn.RepairPriceCustomized = ns.RepairPriceCustomized
dc.pn.RepairCustomizedPricing = ns.RepairCustomizedPricing
dc.pn.ChallengePriceDefault = ns.ChallengePriceDefault
dc.pn.ChallengePriceCustomized = ns.ChallengePriceCustomized
dc.pn.ChallengeCustomizedPricing = ns.ChallengeCustomizedPricing
}
dc.pn.UpTime = durationToSeconds(time.Since(dc.pn.TimeCreated))
if cpus, err := cpu.Percent(0, false); err != nil {
res = append(res, fmt.Errorf("failed to get uptime: %s", err.Error()))
} else {
if dc.pn.CpuUsed = 0; len(cpus) >= 1 {
dc.pn.CpuUsed = cpus[0]
}
}
dc.pn.MemoryUsed = m.HeapAlloc / uint64(units.KiB)
if storage, err := dc.node.Repo.GetStorageUsage(); err != nil {
res = append(res, fmt.Errorf("failed to get storage usage: %s", err.Error()))
} else {
dc.pn.StorageUsed = storage / uint64(units.KiB)
}
bs, ok := dc.node.Exchange.(*bitswap.Bitswap)
if !ok {
res = append(res, fmt.Errorf("failed to perform dc.node.Exchange.(*bitswap.Bitswap) type assertion"))
return res
}
st, err := bs.Stat()
if err != nil {
res = append(res, fmt.Errorf("failed to perform bs.Stat() call: %s", err.Error()))
} else {
dc.pn.Upload = valOrZero(st.DataSent-dc.pn.TotalUpload) / uint64(units.KiB)
dc.pn.Download = valOrZero(st.DataReceived-dc.pn.TotalDownload) / uint64(units.KiB)
dc.pn.TotalUpload = st.DataSent / uint64(units.KiB)
dc.pn.TotalDownload = st.DataReceived / uint64(units.KiB)
dc.pn.BlocksUp = st.BlocksSent
dc.pn.BlocksDown = st.BlocksReceived
dc.pn.PeersConnected = uint64(len(st.Peers))
}
return res
} | function | go | 644 |
commit({ strict } = {}) {
if (isBoolean(strict)) this._strict = strict
else if (isEqual(this._currentDescriptor, this._nextDescriptor)) return false
this._currentDescriptor = cloneDeep(this._nextDescriptor)
this._build()
return true
} | function | javascript | 645 |
def update(self):
ref_format = self.ref_format
if ref_format != "text":
return
body = self.document_body
if not body:
body = self.root
name = self.name
reference = body.get_reference_mark(name=name)
if not reference:
return
self.text = reference.referenced_text() | function | python | 646 |
def change_reference_path_prompt():
ref_node = chose_reference_from_scene_prompt()
if not ref_node:
return
ref_path = mc.referenceQuery(ref_node, filename=True, unresolvedName=True)
ref_path = remove_reference_number(ref_path)
resolved_path = os.path.expandvars(ref_path)
directory = os.path.dirname(resolved_path)
name = os.path.basename(resolved_path)
name_start, extension = os.path.splitext(name)
name_start = name_start.split('_')[0].split('.')[0]
name_pattern = name_start + '*' + extension
matching_files = glob.glob(os.path.join(directory, name_pattern))
matching_files = [os.path.basename(p) for p in matching_files]
new_version = chose_from_list_prompt(matching_files)
if not new_version:
return
new_unresolved_path = os.path.join(os.path.dirname(ref_path), new_version)
new_unresolved_path = new_unresolved_path.replace('\\', '/')
if new_unresolved_path == ref_path:
mc.warning('Skipping, no reference path change (%s).' % ref_path)
return
mc.file(new_unresolved_path, loadReference=ref_node, prompt=False) | function | python | 647 |
protected virtual void UpdatePosition()
{
FPCamera.PositionOffset = (Vector3.up * FPCamera.PositionOffset.y);
CenterEyeAnchor.root.position = FPController.SmoothPosition;
if (!ProceduralMotion.DisallowAll)
CenterEyeAnchor.root.position += FPCamera.SpringState;
else
CenterEyeAnchor.root.position += (Vector3.up * FPCamera.PositionOffset.y);
FPCamera.transform.position = CenterEyeAnchor.position;
} | function | c# | 648 |
async sendFileInfoToMessage_(file, taskBaseInfo) {
const {bucket, name: fileName} = file;
const storageFile = this.options.getStorage(bucket, fileName);
const {size, topic} = taskBaseInfo;
const gcsSplitSize = 1000 * 1000 *
getProperValue(parseFloat(size), STORAGE_FILE_MAXIMUM_SIZE, false);
this.logger.debug(`Split file into size: ${gcsSplitSize}`);
const slicedFiles = await storageFile.split(gcsSplitSize);
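    // Process the slices sequentially: save a task entity and publish each slice's metadata, AND-ing the per-slice results.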
const reducedFn = async (previous, slicedFile, index) => {
const previousResult = await previous;
const data = JSON.stringify({file: slicedFile, bucket});
const taskEntity = Object.assign({slicedFile}, taskBaseInfo);
this.logger.debug(`[${index}] Send ${data} to Topic[${topic}].`);
const currentResult = await this.saveTaskAndSendData_(taskEntity, data);
return currentResult && previousResult;
};
return slicedFiles.reduce(reducedFn, true);
} | function | javascript | 649 |
func NewWriterLevel(w io.Writer, level int) (*Writer, error) {
if level < 1 || level > MaxCompression {
return nil, fmt.Errorf("fpc: invalid compression level: %d", level)
}
z := &Writer{
w: w,
level: level,
enc: newBlockEncoder(w, uint(level)),
}
return z, nil
} | function | go | 650 |
tryAdd(eventData, options = {}) {
throwTypeErrorIfParameterMissing(this._context.connectionId, "tryAdd", "eventData", eventData);
const previouslyInstrumented = Boolean(eventData.properties && eventData.properties[TRACEPARENT_PROPERTY]);
if (!previouslyInstrumented) {
const messageSpan = createMessageSpan(options.parentSpan);
eventData = instrumentEventData(eventData, messageSpan);
this._spanContexts.push(messageSpan.context());
messageSpan.end();
}
const amqpMessage = toAmqpMessage(eventData, this._partitionKey);
amqpMessage.body = this._context.dataTransformer.encode(eventData.body);
this._encodedMessages.push(rheaPromise.message.encode(amqpMessage));
const batchMessage = {
body: rheaPromise.message.data_sections(this._encodedMessages)
};
if (amqpMessage.message_annotations) {
batchMessage.message_annotations = amqpMessage.message_annotations;
}
const encodedBatchMessage = rheaPromise.message.encode(batchMessage);
const currentSize = encodedBatchMessage.length;
if (currentSize > this._maxSizeInBytes) {
this._encodedMessages.pop();
if (!previouslyInstrumented &&
Boolean(eventData.properties && eventData.properties[TRACEPARENT_PROPERTY])) {
this._spanContexts.pop();
}
return false;
}
this._batchMessage = encodedBatchMessage;
this._sizeInBytes = currentSize;
this._count++;
return true;
} | function | javascript | 651 |
def asRgb(c, *args):
if args:
if not isinstance(c, (list, tuple)):
c = [c]
c = list(c)
for arg in args:
c.append(arg)
if isinstance(c, (list, tuple)):
rcc = []
for cc in c:
cc = asRgb(cc)
rcc.append(cc)
return tuple(rcc)
if not isinstance(c, Color):
c = color(c)
if c is not None:
return c.rgb
return None | function | python | 652 |
def emit(self, record):
self.buffer.append(record)
while len(self.buffer) != 0:
nextRecord = self.buffer.popleft()
super().emit(nextRecord)
if self.sock is None:
self.buffer.appendleft(nextRecord)
break | function | python | 653 |
private byte[] serializeData(@NonNull StoreFile storeFile)
throws XmlPullParserException, IOException {
List<StoreData> storeDataList = retrieveStoreDataListForStoreFile(storeFile);
final XmlSerializer out = new FastXmlSerializer();
final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
out.setOutput(outputStream, StandardCharsets.UTF_8.name());
XmlUtil.writeDocumentStart(out, XML_TAG_DOCUMENT_HEADER);
XmlUtil.writeNextValue(out, XML_TAG_VERSION, CURRENT_CONFIG_STORE_DATA_VERSION);
for (StoreData storeData : storeDataList) {
String tag = storeData.getName();
XmlUtil.writeNextSectionStart(out, tag);
storeData.serializeData(out, storeFile.getEncryptionUtil());
XmlUtil.writeNextSectionEnd(out, tag);
}
XmlUtil.writeDocumentEnd(out, XML_TAG_DOCUMENT_HEADER);
return outputStream.toByteArray();
} | function | java | 654 |
private class Worker implements Runnable{
@Override
public void run() {
Log.i(TAG, "dispatching packet");
while(true){
String packet = null;
try {
packet = packetQueue.take();
} catch (InterruptedException e) {
e.printStackTrace();
}
if(packet!=null){
int wrapperNum = -1;
StringBuilder s = new StringBuilder();
try{
JSONObject pack = new JSONObject(packet);
wrapperNum = pack.getInt("id");
String time = pack.getString("time");
JSONArray dataPacksArray = pack.getJSONArray("data");
wrappers.get(wrapperNum).distributePackages(dataPacksArray, time);
}catch(Exception e){
e.printStackTrace();
}
}else{
SystemClock.sleep(100);
}
                if (Thread.currentThread().isInterrupted()) {
return;
}
}
}
} | class | java | 655 |
static private CuiTerm createCuiTuiTerm(final String... columns) {
if (columns.length < 2) {
return null;
}
final int cuiIndex = 0;
int termIndex = 1;
if (columns.length >= 3) {
termIndex = 2;
}
if (columns[cuiIndex].trim().isEmpty() || columns[termIndex].trim().isEmpty()) {
return null;
}
final String cui = columns[cuiIndex];
String tempTerm = "";
tempTerm = tempTerm.toLowerCase();
final String term = columns[termIndex].trim();
return new CuiTerm(cui, term);
} | function | java | 656 |
protected void addMessageComponents(Container container,
GridBagConstraints cons,
Object msg, int maxll,
boolean internallyCreated) {
if (msg == null) {
return;
}
if (msg instanceof Component) {
if (msg instanceof JScrollPane || msg instanceof JPanel) {
cons.fill = GridBagConstraints.BOTH;
cons.weighty = 1;
} else {
cons.fill = GridBagConstraints.HORIZONTAL;
}
cons.weightx = 1;
container.add((Component) msg, cons);
cons.weightx = 0;
cons.weighty = 0;
cons.fill = GridBagConstraints.NONE;
cons.gridy++;
if (!internallyCreated) {
hasCustomComponents = true;
}
} else if (msg instanceof Object[]) {
Object [] msgs = (Object[]) msg;
for (Object o : msgs) {
addMessageComponents(container, cons, o, maxll, false);
}
} else if (msg instanceof Icon) {
JLabel label = new JLabel( (Icon)msg, SwingConstants.CENTER );
configureMessageLabel(label);
addMessageComponents(container, cons, label, maxll, true);
} else {
String s = msg.toString();
int len = s.length();
if (len <= 0) {
return;
}
int nl;
int nll = 0;
if ((nl = s.indexOf(newline)) >= 0) {
nll = newline.length();
} else if ((nl = s.indexOf("\r\n")) >= 0) {
nll = 2;
} else if ((nl = s.indexOf('\n')) >= 0) {
nll = 1;
}
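            // The message contains a line break: add the text before it (or a break panel if it leads), then recurse on the rest.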
if (nl >= 0) {
if (nl == 0) {
JPanel breakPanel = new JPanel() {
public Dimension getPreferredSize() {
Font f = getFont();
if (f != null) {
return new Dimension(1, f.getSize() + 2);
}
return new Dimension(0, 0);
}
};
breakPanel.setName("OptionPane.break");
addMessageComponents(container, cons, breakPanel, maxll,
true);
} else {
addMessageComponents(container, cons, s.substring(0, nl),
maxll, false);
}
addMessageComponents(container, cons, s.substring(nl + nll), maxll,
false);
} else if (len > maxll) {
Container c = Box.createVerticalBox();
c.setName("OptionPane.verticalBox");
burstStringInto(c, s, maxll);
addMessageComponents(container, cons, c, maxll, true );
} else {
JLabel label;
label = new JLabel( s, JLabel.LEADING );
label.setName("OptionPane.label");
configureMessageLabel(label);
addMessageComponents(container, cons, label, maxll, true);
}
}
} | function | java | 657 |
class NewInstance {
/**
* Creates a new instance of the specified class name
*
* Package private so this code is not exposed at the API level.
*/
static Object newInstance(ClassLoader classLoader, String className)
throws ClassNotFoundException, IllegalAccessException,
InstantiationException {
Class driverClass;
if (classLoader == null) {
driverClass = Class.forName(className);
} else {
driverClass = classLoader.loadClass(className);
}
return driverClass.newInstance();
}
/**
* Figure out which ClassLoader to use. For JDK 1.2 and later use the
* context ClassLoader.
*/
static ClassLoader getClassLoader() {
Method m = null;
try {
m = Thread.class.getMethod("getContextClassLoader");
} catch (NoSuchMethodException e) {
// Assume that we are running JDK 1.1, use the current ClassLoader
return NewInstance.class.getClassLoader();
}
try {
return (ClassLoader) m.invoke(Thread.currentThread());
} catch (IllegalAccessException e) {
// assert(false)
throw new UnknownError(e.getMessage());
} catch (InvocationTargetException e) {
// assert(e.getTargetException() instanceof SecurityException)
throw new UnknownError(e.getMessage());
}
}
} | class | java | 658 |
class API:
"""
The SpaceUp Client API
::
from space_api import API
api = API("My-Project", "localhost:4124")
:param project_id: (str) The project ID
:param url: (str) The base URL of space-cloud server
"""
def __init__(self, project_id: str, url: str):
self.project_id = project_id
if url.startswith("http://"):
self.url = url.lstrip("http://")
elif url.startswith("https://"):
self.url = url.lstrip("https://")
else:
self.url = url
self.token = None
self.transport = Transport(self.url, self.project_id)
def close(self):
"""
Closes the communication channel
"""
self.transport.close()
def connect(self):
"""
Connects to the Space Cloud Instance
"""
self.transport.connect()
def set_token(self, token: str):
"""
Sets the JWT Token
:param token: (str) The signed JWT token received from the server on successful authentication
"""
self.token = token
self.transport.token = token
def set_project_id(self, project_id: str):
"""
Sets the Project ID
:param project_id: (str) The project ID
"""
self.project_id = project_id
self.transport.project_id = project_id
def mongo(self) -> 'DB':
"""
Returns a MongoDB client instance
:return: MongoDB client instance
"""
return DB(self.transport, constants.Mongo)
def postgres(self) -> 'DB':
"""
Returns a Postgres client instance
:return: Postgres client instance
"""
return DB(self.transport, constants.Postgres)
def my_sql(self) -> 'DB':
"""
Returns a MySQL client instance
:return: MySQL client instance
"""
return DB(self.transport, constants.MySQL)
def __str__(self) -> str:
return f'SpaceAPI(project_id:{self.project_id}, url:{self.url}, token:{self.token})'
def call(self, service_name: str, func_name: str, params, timeout: Optional[int] = 5000) -> Response:
"""
Calls a function from Function as a Service Engine
::
response = api.call('my-service', 'my-func', { msg: 'Function as a Service is awesome!' }, 1000)
:param service_name: (str) The name of service(engine) with which the function is registered
:param func_name: (str) The name of function to be called
:param params: The params for the function
:param timeout: (int) The (optional) timeout in milliseconds (defaults to 5000)
:return: (Response) The response object containing values corresponding to the request
"""
return self.transport.faas(service_name, func_name, params, timeout)
def service(self, service: str) -> 'Service':
"""
Returns a Service instance
:param service: (str) The name of the service
:return: (Service) The Service instance
"""
return Service(self.transport, service)
def file_store(self) -> 'FileStore':
"""
Returns a FileStore instance
:return: (FileStore) The FileStore instance
"""
return FileStore(self.transport)
def pubsub(self) -> 'Pubsub':
"""
Returns a Pubsub instance
:return: (Pubsub) The Pubsub instance
"""
return Pubsub(self.transport) | class | python | 659 |
public static <N, E> Set<E> parallelEdges(Network<N, E> graph, Object edge) {
if (graph instanceof Hypergraph) {
throw new UnsupportedOperationException();
}
Set<N> incidentNodes = graph.incidentNodes(edge);
if (!graph.allowsParallelEdges()) {
return ImmutableSet.of();
}
Iterator<N> incidentNodesIterator = incidentNodes.iterator();
N node1 = incidentNodesIterator.next();
N node2 = incidentNodesIterator.hasNext() ? incidentNodesIterator.next() : node1;
return Sets.difference(graph.edgesConnecting(node1, node2), ImmutableSet.of(edge));
} | function | java | 660 |
private EventImpl seeThru(EventImpl x, long m, long m2) {
if (x == null) {
return null;
}
if (m == m2 && m2 == x.getCreatorId()) {
return firstSelfWitnessS(x.getSelfParent());
}
return firstSee(lastSee(x, m2), m);
} | function | java | 661 |
public void RaycastView(float projectileInitDistance, out Vector3 projectileInitPosition, out Quaternion lookDirection)
{
float maxDistance = 150.0f;
float minDistance = projectileInitDistance;
int screenX = Renderer.ScreenWidth / 2;
int screenY = Renderer.ScreenHeight / 2;
Camera.ScreenPointToWorldPoint(screenX, screenY, maxDistance, out Vector3 origin);
Camera.ScreenPointToDirection(screenX, screenY, out Vector3 direction);
Vector3 forward = _player.Entity.Forward;
forward = Camera.TransformDirection(forward);
lookDirection = _camera.Entity.Rotation;
projectileInitPosition = origin + forward * minDistance;
if (Physics.Raycast(origin, direction, maxDistance, EntityRaycastFlags.All, out RaycastHit hit, _player.Entity.Physics))
{
_debugRaycastPoint.Position = hit.Point;
if (hit.Distance < minDistance)
{
projectileInitPosition = hit.Point;
}
}
else
{
_debugRaycastPoint.Position = _player.Entity.Position + Vector3.Down;
}
} | function | c# | 662 |
boolean findRemainingCameraMatrices(LookupSimilarImages db, View seed, GrowQueue_I32 motions) {
points3D.reset();
for (int i = 0; i < structure.points.size; i++) {
structure.points.data[i].get(points3D.grow());
}
assocPixel.reset();
for (int i = 0; i < inlierToSeed.size; i++) {
assocPixel.grow().p1.set(matchesTriple.get(i).p1);
}
DMatrixRMaj cameraMatrix = new DMatrixRMaj(3,4);
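		// Estimate a camera matrix for every remaining motion, skipping the two views already covered by the seed triple.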
for (int motionIdx = 0; motionIdx < motions.size; motionIdx++) {
if( motionIdx == selectedTriple[0] || motionIdx == selectedTriple[1])
continue;
int connectionIdx = motions.get(motionIdx);
Motion edge = seed.connections.get(connectionIdx);
View viewI = edge.other(seed);
db.lookupPixelFeats(viewI.id,featsB);
if ( !computeCameraMatrix(seed, edge,featsB,cameraMatrix) ) {
if( verbose != null ) {
verbose.println("Pose estimator failed! motionIdx="+motionIdx);
}
return false;
}
db.lookupShape(edge.other(seed).id,shape);
structure.setView(motionIdx,false,cameraMatrix,shape.width,shape.height);
}
return true;
} | function | java | 663 |
def find_common_topic(self):
self._rewind()
all_conversations = self._preprocess(self.get_all_conversations())
mecab = Mecab()
category = ['NNP', 'NNG']
keywords = [classification[0] for classification in mecab.pos(str(all_conversations)) if classification[1] in category]
freq = Counter(keywords).most_common(300)
return freq | function | python | 664 |
@EventHandler(priority = EventPriority.MONITOR)
public void onAdminJoin(PlayerJoinEvent event) {
if (PermissionUtils.hasPermission(event.getPlayer(), Permission.ADMIN_ALL, false)) {
TranslationUtils.sendMessage(event.getPlayer(),
"&lAn update is available: &b&l" + latestVersion.getOriginalString());
}
} | function | java | 665 |
public static Tween DOMoveToY(this Rigidbody2D rigidbody, Transform target, float speed, float offset = 0f)
{
        float y = target.position.y;
if (offset > 0f)
{
y += offset * (rigidbody.position.y > y ? 1f : -1f);
}
        return rigidbody.DOMoveY(y, Mathf.Abs(rigidbody.position.y - y) / speed)
.SetUpdate(UpdateType.Fixed);
} | function | c# | 666 |
def report_script_err(pipe_element, task, dbm, msg):
if pipe_element.error_msg is None:
pipe_element.error_msg = str(msg)
else:
pipe_element.error_msg += str(msg)
debug_info = "\nPipeElementID = {}".format(pipe_element.idx)
pipe_element.error_msg += debug_info
pipe_element.state = state.PipeElement.SCRIPT_ERROR
task.state = state.Pipe.ERROR
dbm.add(task)
dbm.add(pipe_element)
dbm.commit() | function | python | 667 |
def is_builtin_class_method(obj: Any, attr_name: str) -> bool:
try:
mro = getmro(obj)
cls = next(c for c in mro if attr_name in safe_getattr(c, '__dict__', {}))
except StopIteration:
return False
try:
name = safe_getattr(cls, '__name__')
except AttributeError:
return False
return getattr(builtins, name, None) is cls | function | python | 668 |
func (r *InstanceReconciler) statusProgress(ctx context.Context, ns, name string, log logr.Logger) (int, error) {
var sts appsv1.StatefulSetList
if err := r.List(ctx, &sts, client.InNamespace(ns)); err != nil {
log.Error(err, "failed to get a list of StatefulSets to check status")
return 0, err
}
if len(sts.Items) < 1 {
return 0, fmt.Errorf("failed to find a StatefulSet, found: %d", len(sts.Items))
}
var foundSts *appsv1.StatefulSet
for index, s := range sts.Items {
if s.Name == name {
foundSts = &sts.Items[index]
}
}
if foundSts == nil {
return 0, fmt.Errorf("failed to find the right StatefulSet %s (out of %d)", name, len(sts.Items))
}
log.Info("found the right StatefulSet", "foundSts", &foundSts.Name,
"sts.Status.CurrentReplicas", &foundSts.Status.CurrentReplicas, "sts.Status.ReadyReplicas", foundSts.Status.ReadyReplicas)
if foundSts.Status.CurrentReplicas != 1 {
return 10, fmt.Errorf("StatefulSet is not ready yet? (failed to find the expected number of current replicas): %d", foundSts.Status.CurrentReplicas)
}
if foundSts.Status.ReadyReplicas != 1 {
return 50, fmt.Errorf("StatefulSet is not ready yet? (failed to find the expected number of ready replicas): %d", foundSts.Status.ReadyReplicas)
}
var pods corev1.PodList
if err := r.List(ctx, &pods, client.InNamespace(ns), client.MatchingLabels{"statefulset": name}); err != nil {
log.Error(err, "failed to get a list of Pods to check status")
return 60, err
}
if len(pods.Items) < 1 {
return 65, fmt.Errorf("failed to find enough pods, found: %d pods", len(pods.Items))
}
var foundPod *corev1.Pod
for index, p := range pods.Items {
if p.Name == name+"-0" {
foundPod = &pods.Items[index]
}
}
if foundPod == nil {
return 75, fmt.Errorf("failed to find the right Pod %s (out of %d)", name+"-0", len(pods.Items))
}
log.Info("found the right Pod", "pod.Name", &foundPod.Name, "pod.Status", foundPod.Status.Phase, "#containers", len(foundPod.Status.ContainerStatuses))
if foundPod.Status.Phase != "Running" {
return 85, fmt.Errorf("failed to find the right Pod %s in status Running: %s", name+"-0", foundPod.Status.Phase)
}
for _, c := range foundPod.Status.ContainerStatuses {
if c.Name == databasecontroller.DatabaseContainerName && c.Ready {
return 100, nil
}
}
return 85, fmt.Errorf("failed to find a database container in %+v", foundPod.Status.ContainerStatuses)
} | function | go | 669 |
func Release(db *DB, yamls *gcr.AppYamls, logger *zap.Logger, errorScope report.Scope, app *model.TuberApp, digest string, data *ClusterData, slackClient *slack.Client, diffText string, sentryBearerToken string) error {
return releaser{
logger: logger,
errorScope: errorScope,
releaseYamls: yamls.Release,
prereleaseYamls: yamls.Prerelease,
postreleaseYamls: yamls.PostRelease,
tags: yamls.Tags,
app: app,
digest: digest,
data: data,
db: db,
slackClient: slackClient,
diffText: diffText,
sentryBearerToken: sentryBearerToken,
}.release()
} | function | go | 670 |
public abstract class ValidationPlan {
protected ValidationResult validationResult;
protected ValidationScope validationScope;
private boolean devMode = false;
private FileType fileType = null;
private boolean remote= false;
protected EmblEntryValidationPlanProperty planProperty;
protected EntryDAOUtils entryDAOUtils;
protected EraproDAOUtils eraproDAOUtils;
public ValidationPlan(EmblEntryValidationPlanProperty property)
{
this(property.validationScope.get(),property.isDevMode.get());
this.planProperty=property;
this.planProperty.taxonHelper.set(new TaxonHelperImpl());
this.remote= property.isRemote.get();
}
/**
*
* @param validationScope - the validation scope
* @param devMode - true if the validator is being run in development mode (remote tsv files for editing)
*/
public ValidationPlan(ValidationScope validationScope,
boolean devMode) {//DELETE this constructor if there are references
this.validationScope = validationScope;
this.devMode = devMode;
}
public void addMessageBundle(String bundleName){
ValidationMessageManager.addBundle(bundleName);
}
/**
*
*
* @param target
* @return
* @throws ValidationEngineException
*/
public abstract ValidationResult execute(Object target)
throws ValidationEngineException;
protected ValidationResult execute(EmblEntryValidationCheck<?>[] checks, Object target)
throws ValidationEngineException {
ValidationResult result = new ValidationResult();
for (EmblEntryValidationCheck<?> check : checks) {
result.append(execute(check, target));
}
return result;
}
/**
* Executes a validation check.
*
* @param check a validation check to be executed
* @param target target object to be checked
* @return a validation result
* @throws ValidationEngineException
*/
@SuppressWarnings("unchecked")
public ValidationResult execute(ValidationCheck check, Object target) throws ValidationEngineException {
if (check == null)
{
return validationResult;
}
try
{
check.setEmblEntryValidationPlanProperty(planProperty);
if(planProperty.enproConnection.get()!=null&&entryDAOUtils==null)
{
entryDAOUtils= EntryDAOUtilsImpl.getEntryDAOUtilsImpl(planProperty.enproConnection.get());
}
check.setEntryDAOUtils(entryDAOUtils);
if(planProperty.eraproConnection.get()!=null&&eraproDAOUtils==null)
{
eraproDAOUtils = new EraproDAOUtilsImpl(planProperty.eraproConnection.get());
}
check.setEraproDAOUtils(eraproDAOUtils);
}catch(Exception e)
{
throw new ValidationEngineException(e);
}
//long start= System.currentTimeMillis();
Class<? extends ValidationCheck> checkClass = check.getClass();
ExcludeScope excludeScopeAnnotation = checkClass.getAnnotation(ExcludeScope.class);
RemoteExclude remoteExclude = checkClass.getAnnotation(RemoteExclude.class);
Description descAnnotation = checkClass.getAnnotation(Description.class);
GroupIncludeScope groupIncludeAnnotation = checkClass.getAnnotation(GroupIncludeScope.class);
if(remoteExclude!=null&&remote)
{
return validationResult;
}
if (excludeScopeAnnotation != null && isInValidationScope(excludeScopeAnnotation.validationScope())) {
return validationResult;
}
if(groupIncludeAnnotation!=null && !isInValidationScopeGroup(groupIncludeAnnotation.group()))
{
return validationResult;
}
// inject data sets
/*if(null != checkDataSetAnnotation) {
Stream.of(checkDataSetAnnotation.dataSetNames()).forEach( dsName -> GlobalDataSets.loadIfNotExist(dsName, dataManager, fileManager, devMode));
}
*/
validationResult.append(check.check(target));
if (excludeScopeAnnotation != null) {
demoteSeverity(validationResult, excludeScopeAnnotation.maxSeverity());
}
if(groupIncludeAnnotation!=null)
{
demoteSeverity(validationResult, groupIncludeAnnotation.maxSeverity());
}
// System.out.println(this.result.count());
return validationResult;
}
/**
* Demotes of severity to a specified level (maxSeverity) for all messages.
*
* @param planResult a validation result
* @param maxSeverity a maximum severity
*/
protected void demoteSeverity(ValidationResult planResult,
Severity maxSeverity) {
if (Severity.ERROR.equals(maxSeverity)) {
return;
}
for (ValidationMessage<?> message : planResult.getMessages()) {
switch (message.getSeverity()) {
case ERROR:
message.setSeverity(maxSeverity);
break;
case WARNING:
message.setSeverity(maxSeverity);
break;
}
}
}
protected boolean isInValidationScope(ValidationScope[] validationScopes) {
if (validationScopes == null) {
return false;
}
for (ValidationScope scope : validationScopes) {
if (scope == null) {
continue;
}
if (scope.equals(validationScope))
return true;
}
return false;
}
protected boolean isInValidationScopeGroup(ValidationScope.Group[] validationScopeGroups) {
if (validationScopeGroups == null) {
return false;
}
for (ValidationScope.Group groupScope : validationScopeGroups) {
if (groupScope == null) {
continue;
}
if (groupScope.equals(validationScope.group()))
return true;
}
return false;
}
} | class | java | 671 |
void read_infile(char *filename, GENE_DATA *pdata) {
FILE *fh;
int i, j;
double ftemp;
fh=fopen(filename, "r");
fscanf(fh, "%s", pdata->name);
for (j=0; j<pdata->ncol; j++)
fscanf(fh, "%d", pdata->L+j);
for (i=0; i<pdata->nrow; i++) {
fscanf(fh, "%s", pdata->id[i]);
for (j=0; j<pdata->ncol; j++) {
fscanf(fh, "%lg", &ftemp);
pdata->d[i][j]=ftemp;
}
}
fclose(fh);
} | function | c | 672 |
public class MeleeWeaponJsonProcessor extends ArmoryEntryJsonProcessor<MeleeWeaponData>
{
public MeleeWeaponJsonProcessor()
{
super(ArmoryAPI.MELEE_WEAPON_ID);
keyHandler = new JsonProcessorInjectionMap(MeleeWeaponData.class);
debugPrinter = JsonContentLoader.INSTANCE != null ? JsonContentLoader.INSTANCE.debug : new DebugPrinter(LogManager.getLogger());
}
@Override
public String getLoadOrder()
{
return null;
}
@Override
public MeleeWeaponData process(JsonElement element)
{
debugPrinter.start("MeleeWeaponProcessor", "Processing entry", Engine.runningAsDev);
final JsonObject weaponJSON = element.getAsJsonObject();
ensureValuesExist(weaponJSON, "id", "name");
//Get common data
String id = weaponJSON.get("id").getAsString();
String name = weaponJSON.get("name").getAsString();
debugPrinter.log("Name: " + name);
debugPrinter.log("ID: " + id);
//Create object
MeleeWeaponData weaponData = new MeleeWeaponData(this, id, name);
//Process shared data
processExtraData(weaponJSON, weaponData);
debugPrinter.end("Done...");
return weaponData;
}
@Override
public boolean addData(String key, JsonElement data, MeleeWeaponData generatedObject)
{
return super.addData(key, data, generatedObject); //TODO consider special handling for damage types
}
@Override
public boolean removeData(String key, MeleeWeaponData generatedObject)
{
return super.removeData(key, generatedObject);
}
@Override
public boolean replaceData(String key, JsonElement data, MeleeWeaponData generatedObject)
{
return super.replaceData(key, data, generatedObject);
}
} | class | java | 673 |
def _fetch_rates(self, url, date=None):
if self.latest_rates and not date:
return self.latest_rates
response = requests.get(url)
rss = ET.fromstring(response.text)
date = datetime.datetime.strptime(
response.headers['date'], '%a, %d %b %Y %H:%M:%S %Z')
return {'rss': rss, 'date': date} | function | python | 674 |
def prepare(
input_data: T.Union[pd.DataFrame, Path],
*,
output_file: T.Optional[Path] = None,
test_size: float = 0.2
) -> pd.DataFrame:
if isinstance(input_data, Path):
input_data = pd.read_csv(input_data)
features = [
"X",
"Y",
"region",
"crashYear",
"holiday",
"crashSHDescription",
"flatHill",
"NumberOfLanes",
"roadCharacter",
"roadLane",
"roadSurface",
"speedLimit",
"streetLight",
"light",
"weatherA",
"weatherB",
"crashSeverity",
]
input_data = input_data[features].copy()
input_data = input_data[input_data.X > 0].copy()
input_data.dropna(
subset=["crashSHDescription", "region", "NumberOfLanes"], inplace=True
)
input_data["holiday"].fillna("Normal day", inplace=True)
input_data["LSZ"] = input_data["speedLimit"].isna()
input_data["speedLimit"].fillna(100, inplace=True)
input_data.replace("Null", "Unknown", inplace=True)
input_data.sort_values(by="crashYear", ascending=True, inplace=True)
input_data = input_data.groupby("crashYear").sample(frac=1, random_state=42)
test_idx = int(len(input_data) * test_size)
input_data["fold"] = "train"
input_data.loc[input_data.index[-test_idx:], "fold"] = "test"
input_data["injuryCrash"] = input_data["crashSeverity"] != "Non-Injury Crash"
input_data.drop(columns="crashSeverity", inplace=True)
if output_file is not None:
input_data.to_csv(output_file, index=False)
return input_data | function | python | 675 |
private void rollbackSchemaCreation(List<Table> viewsCreated, List<Table> tableConstraintsCreated, List<Table> tablesCreated)
{
if (NucleusLogger.DATASTORE_SCHEMA.isDebugEnabled())
{
NucleusLogger.DATASTORE_SCHEMA.debug(Localiser.msg("050040"));
}
try
{
if (viewsCreated != null)
{
ListIterator li = viewsCreated.listIterator(viewsCreated.size());
while (li.hasPrevious())
{
((ViewImpl) li.previous()).drop(getCurrentConnection());
}
}
if( tableConstraintsCreated != null)
{
ListIterator li = tableConstraintsCreated.listIterator(tableConstraintsCreated.size());
while (li.hasPrevious())
{
((TableImpl) li.previous()).dropConstraints(getCurrentConnection());
}
}
if (tablesCreated != null)
{
ListIterator li = tablesCreated.listIterator(tablesCreated.size());
while (li.hasPrevious())
{
((TableImpl) li.previous()).drop(getCurrentConnection());
}
}
}
catch (Exception e)
{
NucleusLogger.DATASTORE_SCHEMA.warn(Localiser.msg("050041", e));
}
AutoStartMechanism starter = rdbmsMgr.getNucleusContext().getAutoStartMechanism();
if (starter != null)
{
try
{
if (!starter.isOpen())
{
starter.open();
}
for (RDBMSStoreData sd : schemaDataAdded)
{
starter.deleteClass(sd.getName());
}
}
finally
{
if (starter.isOpen())
{
starter.close();
}
}
}
} | function | java | 676 |
public void add(final CommandGroup group) {
// Add a group for this CommandGroup
addGroup(group.getName(), group.getDescription());
// Look at all the methods and find the annotated ones, if any
Method[] methods = group.getClass().getMethods();
for (final Method m : methods) {
String methodName = group.getClass().getName() + "#" + m.getName();
final Command cmd = m.getAnnotation(Command.class);
if (cmd == null) {
continue;
}
// Let's see if the params are of acceptable types
Parameter[] params = m.getParameters();
if (params.length == 0 || params[0].getType() != CommandInterpreter.class) {
logger.warning(methodName +
" must have CommandInterpreter for its first parameter");
continue;
}
// Make sure that the parameter types are supported.
for (int i = 1; i < params.length; i++) {
// Check the parameter against our
if (!supportedMethodParameters.contains(params[i].getType())
&& !params[i].getType().isEnum()) {
logger.warning(methodName
+ " has unsupported parameter type "
+ params[i].getType().getSimpleName());
}
// If the parameter type is array of String, then it needs
// to be the last parameter.
if(params[i].getType() == String[].class) {
if(i != params.length - 1) {
logger.warning(String.format("%s has String[] parameter which is not last", methodName));
}
}
}
// Also check to see that if we have optional parameters, we
// don't have any non-optional ones after the optional ones.
boolean foundOptional = false;
for (Parameter p : params) {
Optional opt = p.getAnnotation(Optional.class);
if (foundOptional && opt == null) {
logger.warning(methodName
+ " has non-optional parameter following optional parameter.");
continue;
}
if (opt != null) {
foundOptional = true;
}
}
// And check return type
if (m.getReturnType() != String.class) {
logger.warning(methodName +
" has wrong return type. Expected String");
continue;
}
// Now let's see if there's a method to get the completers for
// this method. First, check for an explicit one in the
// annotation itself.
Method completerMtd = null;
if (!cmd.completers().isEmpty()) {
try {
completerMtd = group.getClass().getMethod(cmd.completers(), (Class<?>[])null);
} catch (NoSuchMethodException e) {
logger.warning(methodName +
" references a non-existant completer method: "
+ cmd.completers());
}
}
// If we didn't get a completer that way, try to see if there's
// a method with a name that conforms to the convention.
try {
completerMtd = group.getClass().getMethod(m.getName() + "Completers", (Class<?>[])null);
} catch (NoSuchMethodException e) {
// This is okay, probably they just didn't want a completer.
logger.finer(methodName + " has no completers");
}
// Get a completer for this method and add it in to our shell
CommandInterface ci = methodToCommand(m, cmd.usage(), group, completerMtd);
add(m.getName(), group.getName(), ci);
if (!cmd.alias().isEmpty()) {
addAlias(m.getName(), cmd.alias());
}
}
} | function | java | 677 |
def _parse_query(self, filters=None, feats=None, dates=None):
query_lst = []
if not self.rapi_collections:
self.get_collections()
if self.rapi_collections is None:
return None
if dates is not None and not str(dates).strip() == '':
self.dates = dates
if self.dates is not None:
field_id = self._get_field_id('Acquisition Start Date')
if field_id is None:
field_id = self._get_field_id('Start Date')
date_queries = []
for rng in self.dates:
start = None
end = None
if isinstance(rng, str):
time_words = ['hour', 'day', 'week', 'month', 'year']
if any(word in rng for word in time_words):
start = dateparser.parse(rng).strftime("%Y%m%d_%H%M%S")
end = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
else:
if 'start' not in rng.keys():
break
start = self._convert_date(rng.get('start'),
"%Y%m%d_%H%M%S",
out_form="%Y-%m-%dT%H:%M:%SZ")
end = self._convert_date(rng.get('end'), "%Y%m%d_%H%M%S",
out_form="%Y-%m-%dT%H:%M:%SZ")
if start is None or end is None:
continue
date_queries.append(f"{field_id}>='{start}' AND "
f"{field_id}<='{end}'")
if len(date_queries) > 0:
query_lst.append(f"({' OR '.join(date_queries)})")
if feats is None:
feats = self.feats
if feats is not None:
geom_lst = []
for idx, f in enumerate(feats):
op = f[0].upper()
src = f[1]
self.geoms = self.geo.add_geom(src)
if self.geoms is None or isinstance(self.geoms, SyntaxError):
msg = f"Geometry feature #{str(idx + 1)} could not be " \
f"determined. Excluding it from search."
self.log_msg(msg, 'warning')
else:
field_id = self._get_field_id('Footprint')
self.geoms = [self.geoms] \
if not isinstance(self.geoms, list) else self.geoms
for g in self.geoms:
if op == '=':
geom_lst.append(f"{field_id}{op}'{g}'")
else:
geom_lst.append(f'{field_id} {op} {g}')
if len(geom_lst) > 0:
query_lst.append(f"({' OR '.join(geom_lst)})")
if filters is not None:
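            # Turn each filter entry of the form field: (operator, value) into a query clause on the matching field id.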
for field, values in filters.items():
field_id = self._get_field_id(field)
if field_id is None:
msg = f"No available field named '{field}'."
self.log_msg(msg, 'warning')
continue
d_type = self._get_field_type(self.collection, field_id)
op = values[0]
val = values[1]
if not any(c in op for c in '=><'):
op = ' %s ' % op
if field == 'Incidence Angle' or field == 'Scale' or \
field == 'Spacial Resolution' or \
field == 'Absolute Orbit':
if isinstance(val, list) or isinstance(val, tuple):
for v in val:
if v.find('-') > -1:
start, end = v.split('-')
val_query = self._parse_range(field_id, start,
end)
else:
val_query = f"{field_id}{op}{v}"
query_lst.append(val_query)
continue
else:
if str(val).find('-') > -1:
start, end = str(val).split('-')
val_query = self._parse_range(field_id, start, end)
else:
val_query = f"{field_id}{op}{val}"
elif field == 'Footprint':
pnts = []
vals = val.split(' ')
for idx in range(0, len(vals), 2):
if vals[idx].strip() == '':
continue
pnts.append((float(vals[idx]), float(vals[idx + 1])))
self.geoms = self.geo.add_geom(pnts)
val_query = f"{field_id}{op}{self.geoms}"
else:
if isinstance(val, list) or isinstance(val, tuple):
val_query = self._build_or(field_id, op, val, d_type)
else:
if d_type == 'String':
val_query = f"{field_id}{op}'{val}'"
elif d_type == 'DateTimeRange':
date = dateutil.parser.parse(val)
iso_date = date.isoformat()
val_query = f"{field_id}{op}'{iso_date}'"
else:
val_query = f"{field_id}{op}{val}"
query_lst.append(val_query)
if len(query_lst) > 1:
query_lst = ['(%s)' % q if q.find(' OR ') > -1 else q
for q in query_lst]
full_query = ' AND '.join(query_lst)
return full_query | function | python | 678 |
class BackedTexture extends DataTexture {
constructor(renderer, width, height, channels, options) {
super(renderer, width, height, channels, options);
this.data = new this.ctor(this.n);
}
resize(width, height) {
const old = this.data;
const oldWidth = this.width;
const oldHeight = this.height;
this.width = width;
this.height = height;
this.n = width * height * this.channels;
this.data = new this.ctor(this.n);
const { gl } = this;
const state = this.renderer.state;
state.bindTexture(gl.TEXTURE_2D, this.texture);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, this.format, width, height, 0, this.format, this.type, this.data);
this.uniforms.dataResolution.value.set(1 / width, 1 / height);
return this.write(old, 0, 0, oldWidth, oldHeight);
}
write(src, x, y, w, h) {
let j;
const { width } = this;
const dst = this.data;
const { channels } = this;
let i = 0;
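        // Fast path: writes covering full texture rows can be copied in one linear pass.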
if (width === w && x === 0) {
j = y * w * channels;
const n = w * h * channels;
while (i < n) {
dst[j++] = src[i++];
}
}
else {
const stride = width * channels;
const ww = w * channels;
const xx = x * channels;
let yy = y;
const yh = y + h;
while (yy < yh) {
let k = 0;
j = xx + yy * stride;
while (k++ < ww) {
dst[j++] = src[i++];
}
yy++;
}
}
return super.write(src, x, y, w, h);
}
dispose() {
this.data = null;
return super.dispose();
}
} | class | javascript | 679 |
pub fn make_active_set_entries(
active_keypair: &Arc<Keypair>,
token_source: &Keypair,
stake: u64,
tick_height_to_vote_on: u64,
last_entry_id: &Hash,
last_tick_id: &Hash,
num_ending_ticks: u64,
) -> (Vec<Entry>, VotingKeypair) {
// 1) Assume the active_keypair node has no tokens staked
let transfer_tx = SystemTransaction::new_account(
&token_source,
active_keypair.pubkey(),
stake,
*last_tick_id,
0,
);
let mut last_entry_id = *last_entry_id;
let transfer_entry = next_entry_mut(&mut last_entry_id, 1, vec![transfer_tx]);
// 2) Create and register a vote account for active_keypair
let voting_keypair = VotingKeypair::new_local(active_keypair);
let vote_account_id = voting_keypair.pubkey();
let new_vote_account_tx =
VoteTransaction::new_account(active_keypair, vote_account_id, *last_tick_id, 1, 1);
let new_vote_account_entry = next_entry_mut(&mut last_entry_id, 1, vec![new_vote_account_tx]);
// 3) Create vote entry
let vote_tx =
VoteTransaction::new_vote(&voting_keypair, tick_height_to_vote_on, *last_tick_id, 0);
let vote_entry = next_entry_mut(&mut last_entry_id, 1, vec![vote_tx]);
// 4) Create the ending empty ticks
let mut txs = vec![transfer_entry, new_vote_account_entry, vote_entry];
let empty_ticks = create_ticks(num_ending_ticks, last_entry_id);
txs.extend(empty_ticks);
(txs, voting_keypair)
} | function | rust | 680 |
xsltStylesheetPtr
xsltParseStylesheetProcess(xsltStylesheetPtr style, xmlDocPtr doc)
{
xsltCompilerCtxtPtr cctxt;
xmlNodePtr cur;
int oldIsSimplifiedStylesheet;
xsltInitGlobals();
if ((style == NULL) || (doc == NULL))
return(NULL);
cctxt = XSLT_CCTXT(style);
cur = xmlDocGetRootElement(doc);
if (cur == NULL) {
xsltTransformError(NULL, style, (xmlNodePtr) doc,
"xsltParseStylesheetProcess : empty stylesheet\n");
return(NULL);
}
oldIsSimplifiedStylesheet = cctxt->simplified;
if ((IS_XSLT_ELEM(cur)) &&
((IS_XSLT_NAME(cur, "stylesheet")) ||
(IS_XSLT_NAME(cur, "transform")))) {
#ifdef WITH_XSLT_DEBUG_PARSING
xsltGenericDebug(xsltGenericDebugContext,
"xsltParseStylesheetProcess : found stylesheet\n");
#endif
cctxt->simplified = 0;
style->literal_result = 0;
} else {
cctxt->simplified = 1;
style->literal_result = 1;
}
if (! style->nopreproc)
xsltParsePreprocessStylesheetTree(cctxt, cur);
if (style->literal_result == 0) {
if (xsltParseXSLTStylesheetElem(cctxt, cur) != 0)
return(NULL);
} else {
if (xsltParseSimplifiedStylesheetTree(cctxt, doc, cur) != 0)
return(NULL);
}
cctxt->simplified = oldIsSimplifiedStylesheet;
return(style);
} | function | c | 681 |
func (i *interpreter) optimiseExpressions(stmts []*Statement) {
WalkAST(stmts, func(expr *Expression) bool {
if constant := i.scope.Constant(expr); constant != nil {
expr.Optimised = &OptimisedExpression{Constant: constant}
expr.Val = nil
return false
} else if expr.Val != nil && expr.Val.Ident != nil && expr.Val.Call == nil && expr.Op == nil && expr.If == nil && len(expr.Val.Slices) == 0 {
if expr.Val.Property == nil && len(expr.Val.Ident.Action) == 0 {
expr.Optimised = &OptimisedExpression{Local: expr.Val.Ident.Name}
return false
} else if expr.Val.Ident.Name == "CONFIG" && len(expr.Val.Ident.Action) == 1 && expr.Val.Ident.Action[0].Property != nil && len(expr.Val.Ident.Action[0].Property.Action) == 0 {
expr.Optimised = &OptimisedExpression{Config: expr.Val.Ident.Action[0].Property.Name}
expr.Val = nil
return false
}
}
return true
})
} | function | go | 682 |
class Image {
constructor(el) {
this.DOM = { el: el };
        // image default styles
this.defaultStyle = {
x: 0,
y: 0,
opacity: 0
};
// get sizes/position
this.getRect();
// init/bind events
this.initEvents();
}
initEvents() {
// on resize get updated sizes/position
window.addEventListener('resize', () => this.resize());
}
resize() {
// reset styles
TweenMax.set(this.DOM.el, this.defaultStyle);
// get sizes/position
this.getRect();
}
getRect() {
this.rect = this.DOM.el.getBoundingClientRect();
}
isActive() {
// check if image is animating or if it's visible
return TweenMax.isTweening(this.DOM.el) || this.DOM.el.style.opacity != 0;
}
} | class | javascript | 683 |
public boolean isOccurrence(Calendar calendar, Date occurrence) {
java.util.Calendar cal = Dates.getCalendarInstance(occurrence);
cal.setTime(occurrence);
if(occurrence instanceof DateTime) {
cal.add(java.util.Calendar.SECOND, 1);
}
else {
cal.add(java.util.Calendar.DAY_OF_WEEK, 1);
}
Date rangeEnd =
org.unitedinternet.cosmo.calendar.util.Dates.getInstance(cal.getTime(), occurrence);
TimeZone tz = null;
for(Object obj : calendar.getComponents(Component.VEVENT)){
VEvent evt = (VEvent)obj;
if(evt.getRecurrenceId() == null && evt.getStartDate() != null){
tz = evt.getStartDate().getTimeZone();
}
}
InstanceList instances = getOcurrences(calendar, occurrence, rangeEnd, tz);
for(Iterator<Instance> it = instances.values().iterator(); it.hasNext();) {
Instance instance = it.next();
if(instance.getRid().getTime()==occurrence.getTime()) {
return true;
}
}
return false;
} | function | java | 684 |
void
vl_kdforest_build (VlKDForest * self, vl_size numData, void const * data)
{
vl_uindex di, ti ;
vl_size maxNumNodes ;
double * searchBounds;
assert(data) ;
assert(numData >= 1) ;
self->data = data ;
self->numData = numData ;
self->trees = vl_malloc (sizeof(VlKDTree*) * self->numTrees) ;
maxNumNodes = 0 ;
for (ti = 0 ; ti < self->numTrees ; ++ ti) {
self->trees[ti] = vl_malloc (sizeof(VlKDTree)) ;
self->trees[ti]->dataIndex = vl_malloc (sizeof(VlKDTreeDataIndexEntry) * self->numData) ;
for (di = 0 ; di < self->numData ; ++ di) {
self->trees[ti]->dataIndex[di].index = di ;
}
self->trees[ti]->numUsedNodes = 0 ;
self->trees[ti]->numAllocatedNodes = 2 * self->numData - 1 ;
self->trees[ti]->nodes = vl_malloc (sizeof(VlKDTreeNode) * self->trees[ti]->numAllocatedNodes) ;
self->trees[ti]->depth = 0 ;
vl_kdtree_build_recursively (self, self->trees[ti],
vl_kdtree_node_new(self->trees[ti], 0), 0,
self->numData, 0) ;
maxNumNodes += self->trees[ti]->numUsedNodes ;
}
searchBounds = vl_malloc(sizeof(double) * 2 * self->dimension);
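  /* For each tree, reset every dimension's bounds to (-inf, +inf), then tighten them recursively per node. */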
for (ti = 0 ; ti < self->numTrees ; ++ ti) {
double * iter = searchBounds ;
double * end = iter + 2 * self->dimension ;
while (iter < end) {
*iter++ = - VL_INFINITY_F ;
*iter++ = + VL_INFINITY_F ;
}
vl_kdtree_calc_bounds_recursively (self->trees[ti], 0, searchBounds) ;
}
vl_free(searchBounds);
self -> maxNumNodes = maxNumNodes;
} | function | c | 685 |
func buildOSRelease(values map[string]string) string {
var osRelease string
name := values["NAME"]
version := values["VERSION"]
if version == "" {
version = values["VERSION_ID"]
}
if name != "" && version != "" {
osRelease = fmt.Sprintf("%s %s", name, version)
} else {
osRelease = values["PRETTY_NAME"]
}
return osRelease
} | function | go | 686 |
def current(cls):
token = session.get('oauth2_token')
if token is None:
return None
with make_session(token=token) as discord:
data = cache.get_cached_user_data(token)
if data is None:
user = discord.get(DISCORD_API_URL + '/users/@me')
if user.status_code == 401:
session.pop('oauth2_token')
return None
data = user.json()
cache.set_cached_user_data(token, data)
return cls(data) if data else None | function | python | 687 |
function arrayToObj(arr, keyPicker) {
if (!Array.isArray(arr)) {
console.error('arrayToObj.notArray', arr);
return {};
}
let obj = {};
for (let i = 0; i < arr.length; ++i) {
let v;
let a = arr[i];
let k;
if (typeof a === 'number' || typeof a === 'string') {
k = a;
v = true;
}
else if (a !== null && typeof a === 'object') {
if (typeof keyPicker === 'string') {
k = a[keyPicker];
}
else if (typeof keyPicker === 'function') {
k = keyPicker(a);
}
else {
k = i;
}
v = a;
}
if (k === undefined || k === null) {
console.error(`arrayToObj.undefinedKey-- i: ${i}, k: ${k}`);
continue;
}
if (typeof (k) !== 'number' && typeof (k) !== 'string') {
console.error(`arrayToObj.invalidKey-- i: ${i}, type: ${typeof (k)}, k: ${k}`);
continue;
}
if (obj.hasOwnProperty(k)) {
console.error(`arrayToObj.duplicateKeys-- k: ${k}, i: ${i}, a: ${a}`);
continue;
}
obj[k] = v;
}
return obj;
} | function | javascript | 688 |
public bool IsProbablePrime(int confidence)
{
BigInteger thisVal;
if ((this._data[MaxLength - 1] & 0x80000000) != 0)
thisVal = -this;
else
thisVal = this;
for (int p = 0; p < PrimesBelow2000.Length; p++)
{
BigInteger divisor = PrimesBelow2000[p];
if (divisor >= thisVal)
break;
BigInteger resultNum = thisVal % divisor;
if (resultNum.IntValue() == 0)
return false;
}
            return thisVal.RabinMillerTest(confidence);
} | function | c# | 689 |
public class ResourceCollectionEnumeratorContainer
extends BasePartnerComponentString
implements IResourceCollectionEnumeratorContainer
{
/**
* A reference to an offer enumerator factory.
*/
private IndexBasedCollectionEnumeratorFactory<Offer, ResourceCollection<Offer>> offerEnumeratorFactory;
/**
* A reference to a customer enumerator factory.
*/
private IndexBasedCollectionEnumeratorFactory<Customer, SeekBasedResourceCollection<Customer>> customerEnumeratorFactory;
/**
* A reference to a customer-user enumerator factory.
*/
private IndexBasedCollectionEnumeratorFactory<CustomerUser, SeekBasedResourceCollection<CustomerUser>> customerUserEnumeratorFactory;
/**
* A reference to an invoice enumerator factory.
*/
private IndexBasedCollectionEnumeratorFactory<Invoice, ResourceCollection<Invoice>> invoiceEnumeratorFactory;
/**
* A reference to a service request enumerator factory.
*/
private IndexBasedCollectionEnumeratorFactory<ServiceRequest, ResourceCollection<ServiceRequest>> serviceRequestEnumeratorFactory;
/**
* A reference to an invoice line enumerator factory.
*/
private IndexBasedCollectionEnumeratorFactory<InvoiceLineItem, ResourceCollection<InvoiceLineItem>> invoiceLineItemEnumeratorFactory;
/**
* A reference to an audit record enumerator factory.
*/
private IndexBasedCollectionEnumeratorFactory<AuditRecord, SeekBasedResourceCollection<AuditRecord>> auditRecordEnumeratorFactory;
/**
* A reference to a utilization record enumerator factory.
*/
private IUtilizationCollectionEnumeratorContainer utilizationRecordEnumeratorContainer;
/**
* Initializes a new instance of the {@link #ResourceCollectionEnumeratorContainer} class.
*
* @param rootPartnerOperations The root partner operations instance.
*/
public ResourceCollectionEnumeratorContainer( IPartner rootPartnerOperations )
{
super( rootPartnerOperations );
}
/**
* Gets a factory that creates offer collection enumerators.
*/
@Override
public IResourceCollectionEnumeratorFactory<ResourceCollection<Offer>> getOffers()
{
if ( this.offerEnumeratorFactory == null )
{
this.offerEnumeratorFactory =
new IndexBasedCollectionEnumeratorFactory<Offer, ResourceCollection<Offer>>( this.getPartner(),
new TypeReference<ResourceCollection<Offer>>()
{
} );
}
return this.offerEnumeratorFactory;
}
/**
* Gets a factory that creates customer collection enumerators.
*/
@Override
public IResourceCollectionEnumeratorFactory<SeekBasedResourceCollection<Customer>> getCustomers()
{
if ( this.customerEnumeratorFactory == null )
{
this.customerEnumeratorFactory =
new IndexBasedCollectionEnumeratorFactory<Customer, SeekBasedResourceCollection<Customer>>( this.getPartner(),
new TypeReference<SeekBasedResourceCollection<Customer>>()
{
} );
}
return this.customerEnumeratorFactory;
}
/**
     * Gets a factory that creates customer user collection enumerators.
*/
@Override
public IResourceCollectionEnumeratorFactory<SeekBasedResourceCollection<CustomerUser>> getCustomerUsers()
{
if ( this.customerUserEnumeratorFactory == null )
{
this.customerUserEnumeratorFactory =
new IndexBasedCollectionEnumeratorFactory<CustomerUser, SeekBasedResourceCollection<CustomerUser>>( this.getPartner(),
new TypeReference<SeekBasedResourceCollection<CustomerUser>>()
{
} );
}
return this.customerUserEnumeratorFactory;
}
/**
* Gets a factory that creates invoice collection enumerators.
*/
@Override
public IResourceCollectionEnumeratorFactory<ResourceCollection<Invoice>> getInvoices()
{
if ( this.invoiceEnumeratorFactory == null )
{
this.invoiceEnumeratorFactory =
new IndexBasedCollectionEnumeratorFactory<Invoice, ResourceCollection<Invoice>>( this.getPartner(),
new TypeReference<ResourceCollection<Invoice>>()
{
} );
}
return this.invoiceEnumeratorFactory;
}
/**
* Gets a factory that creates service request collection enumerators.
*/
@Override
public IResourceCollectionEnumeratorFactory<ResourceCollection<ServiceRequest>> getServiceRequests()
{
if ( this.serviceRequestEnumeratorFactory == null )
{
this.serviceRequestEnumeratorFactory =
new IndexBasedCollectionEnumeratorFactory<ServiceRequest, ResourceCollection<ServiceRequest>>( this.getPartner(),
new TypeReference<ResourceCollection<ServiceRequest>>()
{
} );
}
return this.serviceRequestEnumeratorFactory;
}
/**
* Gets a factory that creates invoice line item collection enumerators.
*/
@Override
public IResourceCollectionEnumeratorFactory<ResourceCollection<InvoiceLineItem>> getInvoiceLineItems()
{
if ( this.invoiceLineItemEnumeratorFactory == null )
{
this.invoiceLineItemEnumeratorFactory =
new IndexBasedCollectionEnumeratorFactory<InvoiceLineItem, ResourceCollection<InvoiceLineItem>>( this.getPartner(),
new TypeReference<ResourceCollection<InvoiceLineItem>>()
{
} );
}
return this.invoiceLineItemEnumeratorFactory;
}
/**
* Gets a factory that creates audit records collection enumerators.
*/
@Override
public IResourceCollectionEnumeratorFactory<SeekBasedResourceCollection<AuditRecord>> getAuditRecords()
{
if ( this.auditRecordEnumeratorFactory == null )
{
this.auditRecordEnumeratorFactory =
new IndexBasedCollectionEnumeratorFactory<AuditRecord, SeekBasedResourceCollection<AuditRecord>>
(
this.getPartner(),
new TypeReference<SeekBasedResourceCollection<AuditRecord>>(){}
);
}
return this.auditRecordEnumeratorFactory;
}
/**
* Gets factories that create enumerators for utilization records for different subscriptions.
*/
@Override
public IUtilizationCollectionEnumeratorContainer getUtilization()
{
if ( this.utilizationRecordEnumeratorContainer == null )
{
this.utilizationRecordEnumeratorContainer =
new UtilizationCollectionEnumeratorContainer( this.getPartner() );
}
return this.utilizationRecordEnumeratorContainer;
}
} | class | java | 690 |
int ha_tokudb::estimate_num_rows(DB* db, uint64_t* num_rows, DB_TXN* txn) {
int error = ENOSYS;
bool do_commit = false;
DB_BTREE_STAT64 dict_stats;
DB_TXN* txn_to_use = NULL;
if (txn == NULL) {
error = txn_begin(db_env, 0, &txn_to_use, DB_READ_UNCOMMITTED, ha_thd());
if (error) goto cleanup;
do_commit = true;
}
else {
txn_to_use = txn;
}
error = db->stat64(db, txn_to_use, &dict_stats);
if (error) { goto cleanup; }
*num_rows = dict_stats.bt_ndata;
error = 0;
cleanup:
if (do_commit) {
commit_txn(txn_to_use, 0);
txn_to_use = NULL;
}
return error;
} | function | c++ | 691 |
public class SpelParserConfiguration {
private static final SpelCompilerMode defaultCompilerMode;
static {
String compilerMode = SpringProperties.getProperty("spring.expression.compiler.mode");
defaultCompilerMode = (compilerMode != null ?
SpelCompilerMode.valueOf(compilerMode.toUpperCase()) : SpelCompilerMode.OFF);
}
private final SpelCompilerMode compilerMode;
@Nullable
private final ClassLoader compilerClassLoader;
private final boolean autoGrowNullReferences;
private final boolean autoGrowCollections;
private final int maximumAutoGrowSize;
/**
* Create a new {@code SpelParserConfiguration} instance with default settings.
*/
public SpelParserConfiguration() {
this(null, null, false, false, Integer.MAX_VALUE);
}
/**
* Create a new {@code SpelParserConfiguration} instance.
* @param compilerMode the compiler mode for the parser
* @param compilerClassLoader the ClassLoader to use as the basis for expression compilation
*/
public SpelParserConfiguration(@Nullable SpelCompilerMode compilerMode, @Nullable ClassLoader compilerClassLoader) {
this(compilerMode, compilerClassLoader, false, false, Integer.MAX_VALUE);
}
/**
* Create a new {@code SpelParserConfiguration} instance.
* @param autoGrowNullReferences if null references should automatically grow
* @param autoGrowCollections if collections should automatically grow
* @see #SpelParserConfiguration(boolean, boolean, int)
*/
public SpelParserConfiguration(boolean autoGrowNullReferences, boolean autoGrowCollections) {
this(null, null, autoGrowNullReferences, autoGrowCollections, Integer.MAX_VALUE);
}
/**
* Create a new {@code SpelParserConfiguration} instance.
* @param autoGrowNullReferences if null references should automatically grow
* @param autoGrowCollections if collections should automatically grow
* @param maximumAutoGrowSize the maximum size that the collection can auto grow
*/
public SpelParserConfiguration(boolean autoGrowNullReferences, boolean autoGrowCollections, int maximumAutoGrowSize) {
this(null, null, autoGrowNullReferences, autoGrowCollections, maximumAutoGrowSize);
}
/**
* Create a new {@code SpelParserConfiguration} instance.
* @param compilerMode the compiler mode that parsers using this configuration object should use
* @param compilerClassLoader the ClassLoader to use as the basis for expression compilation
* @param autoGrowNullReferences if null references should automatically grow
* @param autoGrowCollections if collections should automatically grow
* @param maximumAutoGrowSize the maximum size that the collection can auto grow
*/
public SpelParserConfiguration(@Nullable SpelCompilerMode compilerMode, @Nullable ClassLoader compilerClassLoader,
boolean autoGrowNullReferences, boolean autoGrowCollections, int maximumAutoGrowSize) {
this.compilerMode = (compilerMode != null ? compilerMode : defaultCompilerMode);
this.compilerClassLoader = compilerClassLoader;
this.autoGrowNullReferences = autoGrowNullReferences;
this.autoGrowCollections = autoGrowCollections;
this.maximumAutoGrowSize = maximumAutoGrowSize;
}
/**
	 * Return the compiler mode for parsers using this configuration object.
*/
public SpelCompilerMode getCompilerMode() {
return this.compilerMode;
}
/**
* Return the ClassLoader to use as the basis for expression compilation.
*/
@Nullable
public ClassLoader getCompilerClassLoader() {
return this.compilerClassLoader;
}
/**
* Return {@code true} if {@code null} references should be automatically grown.
*/
public boolean isAutoGrowNullReferences() {
return this.autoGrowNullReferences;
}
/**
* Return {@code true} if collections should be automatically grown.
*/
public boolean isAutoGrowCollections() {
return this.autoGrowCollections;
}
/**
* Return the maximum size that a collection can auto grow.
*/
public int getMaximumAutoGrowSize() {
return this.maximumAutoGrowSize;
}
} | class | java | 692 |
internal partial class ArgExprImplementation : IArgExpr
{
public Expression AnyInt
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<int>();
});
}
}
public Expression AnyFloat
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<float>();
});
}
}
public Expression AnyDouble
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<double>();
});
}
}
public Expression AnyDecimal
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<decimal>();
});
}
}
public Expression AnyLong
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<long>();
});
}
}
public Expression AnyChar
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<char>();
});
}
}
public Expression AnyString
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<string>();
});
}
}
public Expression AnyObject
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<object>();
});
}
}
public Expression AnyShort
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<short>();
});
}
}
public Expression AnyBool
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<bool>();
});
}
}
public Expression AnyGuid
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<Guid>();
});
}
}
public Expression AnyDateTime
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<DateTime>();
});
}
}
public Expression AnyTimeSpan
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<TimeSpan>();
});
}
}
public Expression AnyByte
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<byte>();
});
}
}
public Expression AnySByte
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<SByte>();
});
}
}
public Expression AnyUri
{
get
{
return ProfilerInterceptor.GuardInternal(() =>
{
return ArgExpr.IsAny<Uri>();
});
}
}
} | class | c# | 693 |
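/// <summary>
/// Returns true when <paramref name="content"/> is non-null and a file with the same name
/// (case-sensitive, culture-invariant comparison) exists in <paramref name="includedFiles"/>;
/// note that a null <paramref name="content"/> short-circuits to false before the
/// <paramref name="includedFiles"/> null check is reached.
/// </summary>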
protected static bool UpdateSelector(FileMetadata[] includedFiles, RepositoryContent content)
{
if (content is null) return false;
if (includedFiles is null) throw new ArgumentNullException(nameof(includedFiles));
var index = Array.FindIndex(includedFiles, x => x.Name.Equals(content.Name, StringComparison.InvariantCulture));
return index != -1;
} | function | c# | 694 |
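// Verifies that an activate notification listener registered with the notification center
// receives the activated experiment, user id, attributes, bucketed variation and the
// dispatched LogEvent, and that the listener can afterwards be removed by its id.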
@Test
public void activateWithListener() throws Exception {
final Experiment activatedExperiment = validProjectConfig.getExperiments().get(0);
final Variation bucketedVariation = activatedExperiment.getVariations().get(0);
EventFactory mockEventFactory = mock(EventFactory.class);
Optimizely optimizely = Optimizely.builder(validDatafile, mockEventHandler)
.withBucketing(mockBucketer)
.withEventBuilder(mockEventFactory)
.withConfig(validProjectConfig)
.withErrorHandler(mockErrorHandler)
.build();
final Map<String, String> testUserAttributes = new HashMap<String, String>();
if (datafileVersion >= 4) {
testUserAttributes.put(ATTRIBUTE_HOUSE_KEY, AUDIENCE_GRYFFINDOR_VALUE);
}
else {
testUserAttributes.put("browser_type", "chrome");
}
testUserAttributes.put(testBucketingIdKey, testBucketingId);
ActivateNotificationListener activateNotification = new ActivateNotificationListener() {
@Override
public void onActivate(@Nonnull Experiment experiment, @Nonnull String userId, @Nonnull Map<String, ?> attributes, @Nonnull Variation variation, @Nonnull LogEvent event) {
assertEquals(experiment.getKey(), activatedExperiment.getKey());
assertEquals(bucketedVariation.getKey(), variation.getKey());
assertEquals(userId, testUserId);
for (Map.Entry<String, ?> entry : attributes.entrySet()) {
assertEquals(testUserAttributes.get(entry.getKey()), entry.getValue());
}
assertEquals(event.getRequestMethod(), RequestMethod.GET);
}
};
int notificationId = optimizely.notificationCenter.addNotificationListener(NotificationCenter.NotificationType.Activate, activateNotification);
when(mockEventFactory.createImpressionEvent(eq(validProjectConfig), eq(activatedExperiment), eq(bucketedVariation),
eq(testUserId), eq(testUserAttributes)))
.thenReturn(logEventToDispatch);
when(mockBucketer.bucket(activatedExperiment, testBucketingId))
.thenReturn(bucketedVariation);
Variation actualVariation = optimizely.activate(activatedExperiment.getKey(), testUserId, testUserAttributes);
assertTrue(optimizely.notificationCenter.removeNotificationListener(notificationId));
verify(mockBucketer).bucket(activatedExperiment, testBucketingId);
assertThat(actualVariation, is(bucketedVariation));
verify(mockEventHandler).dispatchEvent(logEventToDispatch);
} | function | java | 695 |
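/**
* Copies the statements, keeping any nops that appear before the first real statement but
* dropping agreed-upon nops after it, then renumbers the surviving statements in place.
*/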
public static List<Op03SimpleStatement> sortAndRenumber(List<Op03SimpleStatement> statements) {
boolean nonNopSeen = false;
List<Op03SimpleStatement> result = ListFactory.newList();
for (Op03SimpleStatement statement : statements) {
boolean thisIsNop = statement.isAgreedNop();
if (!nonNopSeen) {
result.add(statement);
if (!thisIsNop) nonNopSeen = true;
} else {
if (!thisIsNop) {
result.add(statement);
}
}
}
sortAndRenumberInPlace(result);
return result;
} | function | java | 696 |
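/// <summary>
/// Marks the request context being built so that user preferences are persisted; lazily
/// creates the underlying settings object and returns the builder for call chaining.
/// Throws if the request context has already been set.
/// </summary>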
public RequestContextBuilder PersistUserPreferences()
{
ThrowExceptionIfContextAlreadySet();
if (_settings == null)
{
_settings = new RequestContextSettings();
}
_settings.PersistUserPreferences = true;
return this;
} | function | c# | 697 |
public class Brick {
public static final int BRICK_WIDTH = Engine.BG_WIDTH / BrickPane.COL_NUM;
public static final int BRICK_HEIGHT = 30;
public static final Paint COLOR_LEVEL_ONE_BRICK = Color.web("#fcdef0");
public static final Paint COLOR_LEVEL_TWO_BRICK = Color.web("#ffb0e1");
public static final Paint COLOR_LEVEL_THREE_BRICK = Color.web("#ff80ce");
public static final Paint COLOR_LEVEL_FOUR_BRICK = Color.web("#cd5a91");
public static final Paint COLOR_LEVEL_FIVE_BRICK = Color.web("#96304c");
public static final Paint COLOR_UNREMOVABLE = Color.web("#212121");
// unremovable brick has level UNREMOVABLE
public static final int UNREMOVABLE = 9;
// POWER_UP_NUMBER / POWER_UP_CHANCE is the probability that a brick carries a power-up
public static final int POWER_UP_CHANCE = 10;
private int level;
private Rectangle rectangle;
private PowerUp powerUp;
/**
* Creates a new instance of Brick with the given {@code level} and
* position on {@link BrickPane}.
* @param level the level of this brick
* @param row the horizontal position on {@code BrickPane}
* @param col the vertical position on {@code BrickPane}
* @see BrickPane
*/
public Brick(int level, int row, int col) {
this.level = level;
rectangle = new Rectangle(BRICK_WIDTH - BrickPane.BRICK_GAP, BRICK_HEIGHT);
setColor();
setupPowerUp(row, col);
}
/**
* Sets the color of this brick according to its current level.
* @return true if {@code level} has dropped to 0 or below and the brick should disappear
*/
private boolean setColor() {
switch (level) {
case 1:
rectangle.setFill(COLOR_LEVEL_ONE_BRICK);
break;
case 2:
rectangle.setFill(COLOR_LEVEL_TWO_BRICK);
break;
case 3:
rectangle.setFill(COLOR_LEVEL_THREE_BRICK);
break;
case 4:
rectangle.setFill(COLOR_LEVEL_FOUR_BRICK);
break;
case 5:
rectangle.setFill(COLOR_LEVEL_FIVE_BRICK);
break;
case UNREMOVABLE:
rectangle.setFill(COLOR_UNREMOVABLE);
break;
default:
return shouldRemove();
}
return false;
}
private boolean shouldRemove() {
return level <= 0;
}
/**
* Determines whether this brick has a {@link PowerUp} and creates an instance
* if it does. {@code row} and {@code col} are used to determine the
* initial position of the {@link PowerUp}.
* @param row the horizontal position on {@code BrickPane}
* @param col the vertical position on {@code BrickPane}
* @see PowerUp
*/
private void setupPowerUp(int row, int col) {
int d = new Random().nextInt(POWER_UP_CHANCE);
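// d is uniform over [0, POWER_UP_CHANCE); a power-up is created only when d falls below
// PowerUp.POWER_UP_NUMBER, so each power-up type appears with probability 1 / POWER_UP_CHANCE.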
if (d < PowerUp.POWER_UP_NUMBER) {
powerUp = new PowerUp(d, BRICK_WIDTH * (col + 0.3), BRICK_HEIGHT * (row + 0.3));
} else {
powerUp = null;
}
}
/**
* Updates the state of this brick when it is hit by a ball.
* @return true if this brick should disappear
*/
public boolean beingHit() {
if (level == UNREMOVABLE) {
return false;
}
level -= 1;
return setColor();
}
/**
* Checks whether the brick has a {@link PowerUp}.
* @return true if the brick has a {@link PowerUp}
*/
public boolean hasPowerUp() {
return powerUp != null;
}
/**
* Gets the x position of this brick.
* @return a double representing the x position
*/
public double getX() {
return rectangle.getX();
}
/**
* Gets the y position of this brick.
* @return a double representing the y position
*/
public double getY() {
return rectangle.getY();
}
/**
* Gets the {@link PowerUp} of this brick if it has one.
* @return the {@link PowerUp} of this brick
* @throws NullPointerException if this brick does not have a power-up
*/
public PowerUp getPowerUp() throws NullPointerException {
if (!hasPowerUp()) {
throw new NullPointerException("This brick does not have a power-up");
}
return powerUp;
}
/**
* Gets the rectangle that visually represents this brick.
* @return the rectangle representing this brick
* @throws NullPointerException if this brick has not been initialized
*/
public Rectangle getInstance() throws NullPointerException {
if (rectangle == null) {
throw new NullPointerException("Brick instance has not been created");
}
return rectangle;
}
} | class | java | 698 |
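A minimal sketch of how collision-handling code might drive the Brick class above; the JavaFX root pane and the surrounding Breakout-style engine are assumed and not part of the record:

import javafx.scene.layout.Pane;

// Hypothetical glue code; Brick and PowerUp come from the record above.
public class BrickCollisionHandler {

    /** Called by the (assumed) engine whenever the ball hits the given brick. */
    public static void onBallHitsBrick(Pane root, Brick brick) {
        if (brick.beingHit()) { // true once the brick's level reaches 0
            root.getChildren().remove(brick.getInstance());
            if (brick.hasPowerUp()) {
                // getPowerUp() throws if there is no power-up, hence the hasPowerUp() guard.
                PowerUp powerUp = brick.getPowerUp();
                // How the power-up enters the scene depends on the PowerUp class,
                // which is not shown in this record.
            }
        }
    }
}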
public final class App {
private static final String weatherDataFileName = "weather.csv";
private static final String[] weatherColumnTitles =
{ "MnT", "MxT", "Day" };
private static final String footballDataFileName = "football.csv";
private static final String[] footballColumnTitles =
{ "Goals Allowed", "Goals", "Team" };
/**
* @return the day number of the day with the smallest
* temperature spread
*/
public static String analyseWeatherData() throws IOException {
InputStream dataInput =
App.class.getResourceAsStream(weatherDataFileName);
Read_AnalyseData tempSpreadAnalyser =
new Read_AnalyseData(dataInput,
weatherColumnTitles[0],
weatherColumnTitles[1],
weatherColumnTitles[2]);
return tempSpreadAnalyser.getSmallestDifferenceIndex();
}
/**
* @return the name of the team with the smallest
* difference between goals scored and goals allowed
*/
public static String analyseFootballData() throws IOException {
InputStream dataInput =
App.class.getResourceAsStream(footballDataFileName);
Read_AnalyseData footballGoalsAnalyzer =
new Read_AnalyseData(dataInput,
footballColumnTitles[0],
footballColumnTitles[1],
footballColumnTitles[2]);
return footballGoalsAnalyzer.getSmallestDifferenceIndex();
}
/**
* This is the main entry method of your program.
*
* @param args The CLI arguments passed
*/
public static void main(String... args) throws IOException {
// Call the weather analysis function
String dayWithSmallestTempSpread = analyseWeatherData();
System.out.printf("Day with smallest temperature spread : %s%n",
dayWithSmallestTempSpread);
// Call the football analysis function
String teamWithSmallestGoalSpread = analyseFootballData();
System.out.printf("Team with smallest goal spread : %s%n",
teamWithSmallestGoalSpread);
}
} | class | java | 699 |
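Read_AnalyseData itself is not part of this record; the sketch below shows one way such a reader could be written against the constructor and method used above, assuming comma-separated files with a header row (only the file names and column titles are taken from the record, everything else is illustrative):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/** Hypothetical sketch of a reader compatible with the calls made in App. */
public class Read_AnalyseData {

    private final List<String[]> rows = new ArrayList<>();
    private final int firstValueColumn;
    private final int secondValueColumn;
    private final int labelColumn;

    public Read_AnalyseData(InputStream in, String firstValueTitle, String secondValueTitle,
                            String labelTitle) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
        // The first line is assumed to be a header naming the columns.
        List<String> titles = Arrays.asList(reader.readLine().split(","));
        firstValueColumn = titles.indexOf(firstValueTitle);
        secondValueColumn = titles.indexOf(secondValueTitle);
        labelColumn = titles.indexOf(labelTitle);
        String line;
        while ((line = reader.readLine()) != null) {
            if (!line.trim().isEmpty()) {
                rows.add(line.split(","));
            }
        }
        reader.close();
    }

    /** Returns the label of the row with the smallest absolute difference between the two value columns. */
    public String getSmallestDifferenceIndex() {
        String best = null;
        double bestSpread = Double.MAX_VALUE;
        for (String[] row : rows) {
            double spread = Math.abs(Double.parseDouble(row[firstValueColumn].trim())
                    - Double.parseDouble(row[secondValueColumn].trim()));
            if (spread < bestSpread) {
                bestSpread = spread;
                best = row[labelColumn].trim();
            }
        }
        return best;
    }
}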