code (stringlengths, 0-30.8k) | source (stringclasses, 6 values) | language (stringclasses, 9 values) | __index_level_0__ (int64, 0-100k)
---|---|---|---|
void
Hacl_GenericField64_to_field(
Hacl_Bignum_MontArithmetic_bn_mont_ctx_u64 *k,
uint64_t *a,
uint64_t *aM
)
{
uint32_t len1 = Hacl_GenericField64_field_get_len(k);
Hacl_Bignum_MontArithmetic_bn_mont_ctx_u64 k1 = *k;
Hacl_Bignum_Montgomery_bn_to_mont_u64(len1, k1.n, k1.mu, k1.r2, a, aM);
} | function | c | 99,700 |
private void printNewAndOldName(List<FilterResult> files, Environment env, NameBuilder builder) {
for(FilterResult file : files) {
StringBuilder sb = new StringBuilder();
builder.execute(file, sb);
env.write(file.toString() + " => " + sb.toString());
env.writeln("");
}
} | function | java | 99,701 |
int
AsyncSocket_SetWebSocketCookie(AsyncSocket *asock,
void *clientData,
const char *path,
const char *sessionId)
{
int ret = ASOCKERR_INVAL;
if (VALID(asock, setWebSocketCookie)) {
AsyncSocketLock(asock);
ret = VT(asock)->setWebSocketCookie(asock, clientData, path, sessionId);
AsyncSocketUnlock(asock);
}
return ret;
} | function | c | 99,702 |
func GetFederationHostsFromConfig(path string) ([]string, error) {
federations := []string{}
conf, err := config.LoadFile(path)
if err != nil {
return federations, err
}
for _, sc := range conf.ScrapeConfigs {
if sc.MetricsPath == "/federate" {
for _, tg := range sc.ServiceDiscoveryConfig.StaticConfigs {
for _, t := range tg.Targets {
if config.CheckTargetAddress(t[model.AddressLabel]) == nil {
federations = append(federations, string(t[model.AddressLabel]))
}
}
}
}
}
return federations, nil
} | function | go | 99,703 |
def send_and_get_reply(self, message, *, timeout=None, unwrap=None):
check_replyable(message)
deadline = timeout_to_deadline(timeout)
if unwrap is None:
unwrap = self._unwrap_reply
serial = next(self.outgoing_serial)
self.send_message(message, serial=serial)
while True:
msg_in = self.receive(timeout=deadline_to_timeout(deadline))
reply_to = msg_in.header.fields.get(HeaderFields.reply_serial, -1)
if reply_to == serial:
if unwrap:
return unwrap_msg(msg_in)
return msg_in
self.router.incoming(msg_in)
for filter in self._filters.matches(msg_in):
filter.queue.append(msg_in) | function | python | 99,704 |
def runTest(self, save=False, indices=(2, 3)):
self.load_rundata()
self.write_rundata_objects()
self.run_code()
import clawpack.pyclaw.gauges as gauges
gauge = gauges.GaugeSolution(1, path=self.temp_path)
print('+++ Gauge 1:\n', gauge.q)
gauge = gauges.GaugeSolution(2, path=self.temp_path)
print('+++ Gauge 2:\n', gauge.q)
self.check_gauges(save=save, gauge_id=1, indices=(1, 2))
self.check_gauges(save=save, gauge_id=2, indices=(1, 2))
self.success = True | function | python | 99,705 |
class ShiftControl {
public:
ShiftControl(const Descriptor &s, Terminator &t, int dim)
: shift_{s}, terminator_{t}, shiftRank_{s.rank()}, dim_{dim} {}
void Init(const Descriptor &source) {
int rank{source.rank()};
RUNTIME_CHECK(terminator_, shiftRank_ == 0 || shiftRank_ == rank - 1);
auto catAndKind{shift_.type().GetCategoryAndKind()};
RUNTIME_CHECK(
terminator_, catAndKind && catAndKind->first == TypeCategory::Integer);
shiftElemLen_ = catAndKind->second;
if (shiftRank_ > 0) {
int k{0};
for (int j{0}; j < rank; ++j) {
if (j + 1 != dim_) {
const Dimension &shiftDim{shift_.GetDimension(k)};
lb_[k++] = shiftDim.LowerBound();
RUNTIME_CHECK(terminator_,
shiftDim.Extent() == source.GetDimension(j).Extent());
}
}
} else {
shiftCount_ =
GetInt64(shift_.OffsetElement<char>(), shiftElemLen_, terminator_);
}
}
SubscriptValue GetShift(const SubscriptValue resultAt[]) const {
if (shiftRank_ > 0) {
SubscriptValue shiftAt[maxRank];
int k{0};
for (int j{0}; j < shiftRank_ + 1; ++j) {
if (j + 1 != dim_) {
shiftAt[k] = lb_[k] + resultAt[j] - 1;
++k;
}
}
return GetInt64(
shift_.Element<char>(shiftAt), shiftElemLen_, terminator_);
} else {
return shiftCount_;
}
}
private:
const Descriptor &shift_;
Terminator &terminator_;
int shiftRank_;
int dim_;
SubscriptValue lb_[maxRank];
std::size_t shiftElemLen_;
SubscriptValue shiftCount_{};
} | class | c++ | 99,706 |
public void SetAlgorithm(IAlgorithm algorithm)
{
_algorithm = algorithm;
var types = new List<SecurityType>();
foreach (var security in _algorithm.Securities.Values)
{
if (!types.Contains(security.Type)) types.Add(security.Type);
}
SecurityType(types);
var debug = new FuncTextWriter(algorithm.Debug);
var error = new FuncTextWriter(algorithm.Error);
Console.SetOut(debug);
Console.SetError(error);
} | function | c# | 99,707 |
public Result lookupByTypes (
final LookupTree<Element, Result, Memento> root,
final List<? extends A_Type> argumentTypesList,
final Memento memento)
{
LookupTree<Element, Result, Memento> tree = root;
Result solution = tree.solutionOrNull();
while (solution == null)
{
tree = tree.lookupStepByTypes(argumentTypesList, this, memento);
solution = tree.solutionOrNull();
}
return solution;
} | function | java | 99,708 |
func (r *ReconcileDataset) Reconcile(request reconcile.Request) (reconcile.Result, error) {
reqLogger := log.WithValues("Request.Namespace", request.Namespace, "Request.Name", request.Name)
reqLogger.Info("Reconciling Dataset")
datasetInstance := &comv1alpha1.Dataset{}
err := r.client.Get(context.TODO(), request.NamespacedName, datasetInstance)
if err != nil {
if errors.IsNotFound(err) {
reqLogger.Info("Dataset is deleted","name",request.NamespacedName)
return reconcile.Result{}, nil
}
return reconcile.Result{}, err
}
pluginPods, err := getCachingPlugins(r.client)
if(err!=nil){return reconcile.Result{},err}
if(len(pluginPods.Items)!=0){
datasetInstance.Annotations = pluginPods.Items[0].Labels
err = r.client.Update(context.TODO(),datasetInstance)
if(err!=nil){
reqLogger.Error(err,"Error while updating dataset according to caching plugin")
return reconcile.Result{},err
}
return reconcile.Result{}, nil
}
datasetInternalInstance := &comv1alpha1.DatasetInternal{}
err = r.client.Get(context.TODO(), request.NamespacedName, datasetInternalInstance)
if(err!=nil && !errors.IsNotFound(err)){
return reconcile.Result{}, err
} else if(err!=nil && errors.IsNotFound(err)){
reqLogger.Info("1-1 Dataset and DatasetInternal because there is no caching plugin")
newDatasetInternalInstance := &comv1alpha1.DatasetInternal{
ObjectMeta: metav1.ObjectMeta{
Name: datasetInstance.ObjectMeta.Name,
Namespace: datasetInstance.ObjectMeta.Namespace,
},
Spec: datasetInstance.Spec,
}
if(len(datasetInstance.Spec.Type) > 0 && datasetInstance.Spec.Type == "ARCHIVE") {
podDownloadJob,bucket := getPodDataDownload(datasetInstance,os.Getenv("OPERATOR_NAMESPACE"))
err = r.client.Create(context.TODO(),podDownloadJob)
if(err!=nil){
reqLogger.Error(err,"Error while creating pod download")
return reconcile.Result{},err
}
minioConf := &v1.Secret{}
err = r.client.Get(context.TODO(),types.NamespacedName{
Namespace: os.Getenv("OPERATOR_NAMESPACE"),
Name: "minio-conf",
},minioConf)
if err != nil {
reqLogger.Error(err,"Error while getting minio-conf secret")
return reconcile.Result{},err
}
endpoint := minioConf.Data["ENDPOINT"]
accessKey := minioConf.Data["AWS_ACCESS_KEY_ID"]
secretAccessKey := minioConf.Data["AWS_SECRET_ACCESS_KEY"]
reqLogger.Info(string(endpoint))
extract := "false"
if(len(datasetInstance.Spec.Extract)>0) {
extract = datasetInstance.Spec.Extract
}
newDatasetInternalInstance.Spec = comv1alpha1.DatasetSpec{
Local: map[string]string{
"type": "COS",
"accessKeyID": string(accessKey),
"secretAccessKey": string(secretAccessKey),
"endpoint": string(endpoint),
"readonly": "true",
"bucket": bucket,
"extract": extract,
"region": "",
},
}
}
if err := controllerutil.SetControllerReference(datasetInstance, newDatasetInternalInstance, r.scheme); err != nil {
return reconcile.Result{}, err
}
err = r.client.Create(context.TODO(),newDatasetInternalInstance)
if err != nil {
return reconcile.Result{}, err
}
}
return reconcile.Result{}, nil
} | function | go | 99,709 |
public final class BranchHandle extends InstructionHandle {
/**
*
*/
private static final long serialVersionUID = -2520878285698255956L;
private BranchInstruction bi; // An alias in fact, but saves lots of casts
private BranchHandle(BranchInstruction i) {
super(i);
bi = i;
}
/** Factory methods.
*/
private static BranchHandle bh_list = null; // List of reusable handles
static final BranchHandle getBranchHandle(BranchInstruction i) {
if(bh_list == null)
return new BranchHandle(i);
else {
BranchHandle bh = bh_list;
bh_list = (BranchHandle)bh.next;
bh.setInstruction(i);
return bh;
}
}
/** Handle adds itself to the list of reusable handles.
*/
protected void addHandle() {
next = bh_list;
bh_list = this;
}
/* Override InstructionHandle methods: delegate to branch instruction.
* Through this overriding all access to the private i_position field should
* be prevented.
*/
public int getPosition() { return bi.position; }
void setPosition(int pos) {
i_position = bi.position = pos;
}
protected int updatePosition(int offset, int max_offset) {
int x = bi.updatePosition(offset, max_offset);
i_position = bi.position;
return x;
}
/**
* Pass new target to instruction.
*/
public void setTarget(InstructionHandle ih) {
bi.setTarget(ih);
}
/**
* Update target of instruction.
*/
public void updateTarget(InstructionHandle old_ih, InstructionHandle new_ih) {
bi.updateTarget(old_ih, new_ih);
}
/**
* @return target of instruction.
*/
public InstructionHandle getTarget() {
return bi.getTarget();
}
/**
* Set new contents. Old instruction is disposed and may not be used anymore.
*/
public void setInstruction(Instruction i) {
super.setInstruction(i);
if(!(i instanceof BranchInstruction))
throw new ClassGenException("Assigning " + i +
" to branch handle which is not a branch instruction");
bi = (BranchInstruction)i;
}
} | class | java | 99,710 |
function stringExcludes(...excludeVals) {
return ({value, displayName}) => {
if (isNil(value)) return null;
const fail = excludeVals.find(s => value.includes(s));
if (fail) return `${displayName} must not include "${fail}"`;
};
} | function | javascript | 99,711 |
public void purge(long timestamp) {
oldestTimestamp = timestamp;
for (int clusterId : map.keySet()) {
while (true) {
List<RevisionRange> list = map.get(clusterId);
List<RevisionRange> newList = purge(list);
if (newList == null) {
if (map.remove(clusterId, list)) {
break;
}
} else if (newList == list) {
break;
} else {
if (map.replace(clusterId, list, newList)) {
break;
}
}
}
}
} | function | java | 99,712 |
void ConfigDemosaic(u32 width , u32 height)
{
Xil_Out32((DEMOSAIC_BASE + 0x10), width );
Xil_Out32((DEMOSAIC_BASE + 0x18), height );
Xil_Out32((DEMOSAIC_BASE + 0x20), 0x0 );
Xil_Out32((DEMOSAIC_BASE + 0x28), 0x0 );
Xil_Out32((DEMOSAIC_BASE + 0x00), 0x81 );
} | function | c | 99,713 |
func New(laddr string) *Worker {
return &Worker{registeredWorkerFunctions: make(map[string]gototo.WorkerFunction),
net: "tcp",
laddr: laddr,
marshal: json.Marshal,
unmarshal: gototo.UnmarshalJSON,
marshalMap: map[string]gototo.MarshalFunction{},
unmarshalMap: map[string]gototo.UnmarshalFunction{},
convertTypeTagName: "json",
defaultContentType: "application/json",
}
} | function | go | 99,714 |
def process_vmr(in_vmr):
evaluation_list = []
recommendation_list = []
vmr_dict = xmltodict.parse(in_vmr, process_namespaces=True, force_list=('substanceAdministrationEvent', 'relatedClinicalStatement', 'substanceAdministrationProposal', 'interpretation'))
if 'substanceAdministrationEvents' in vmr_dict['org.opencds.vmr.v1_0.schema.cdsoutput:cdsOutput']['vmrOutput']['patient']['clinicalStatements']:
for substanceAdministrationEvent in vmr_dict['org.opencds.vmr.v1_0.schema.cdsoutput:cdsOutput']['vmrOutput']['patient']['clinicalStatements']['substanceAdministrationEvents']['substanceAdministrationEvent']:
immunization_id = substanceAdministrationEvent['id']['@root']
cvx = substanceAdministrationEvent['substance']['substanceCode']['@code']
date_of_admin = RE_YYYYMMDD.findall(substanceAdministrationEvent['administrationTimeInterval']['@high'])[0]
if 'relatedClinicalStatement' in substanceAdministrationEvent:
for relatedClinicalStatement in substanceAdministrationEvent['relatedClinicalStatement']:
for inside_substanceAdministrationEvent in relatedClinicalStatement['substanceAdministrationEvent']:
is_valid = inside_substanceAdministrationEvent['isValid']['@value']
dose_number = inside_substanceAdministrationEvent['doseNumber']['@value']
evaluation_interpretation = ''
for inside_relatedClinicalStatement in inside_substanceAdministrationEvent['relatedClinicalStatement']:
evaluation_code = inside_relatedClinicalStatement['observationResult']['observationValue']['concept']['@code']
evaluation_group = inside_relatedClinicalStatement['observationResult']['observationFocus']['@displayName']
evaluation_group_code = inside_relatedClinicalStatement['observationResult']['observationFocus']['@code']
if 'interpretation' in inside_relatedClinicalStatement['observationResult']:
for interpretation in inside_relatedClinicalStatement['observationResult']['interpretation']:
if len(evaluation_interpretation) > 0:
evaluation_interpretation += ","
evaluation_interpretation += interpretation['@code']
evaluation_list.append([immunization_id, date_of_admin, cvx, evaluation_group, is_valid, dose_number, evaluation_code, evaluation_interpretation, evaluation_group_code])
else:
evaluation_list.append([immunization_id, date_of_admin, cvx, 'Unsupported', 'unsupported', '0', 'UNSUPPORTED', '', '0'])
# forecasts
for substanceAdministrationProposal in vmr_dict['org.opencds.vmr.v1_0.schema.cdsoutput:cdsOutput']['vmrOutput']['patient']['clinicalStatements']['substanceAdministrationProposals']['substanceAdministrationProposal']:
substance_code = ''
if substanceAdministrationProposal['substance']['substanceCode']['@codeSystem'] == '2.16.840.1.113883.12.292':
substance_code = substanceAdministrationProposal['substance']['substanceCode']['@code']
for relatedClinicalStatement in substanceAdministrationProposal['relatedClinicalStatement']:
vaccine_group = relatedClinicalStatement['observationResult']['observationFocus']['@displayName']
vaccine_group_code = relatedClinicalStatement['observationResult']['observationFocus']['@code']
forecast_concept = relatedClinicalStatement['observationResult']['observationValue']['concept']['@code']
forecast_interpretation = ''
if 'interpretation' in relatedClinicalStatement['observationResult']:
for interpretation in relatedClinicalStatement['observationResult']['interpretation']:
if len(forecast_interpretation) > 0:
forecast_interpretation += ","
forecast_interpretation += interpretation['@code']
rec_date = ''
pastdue_date = ''
earliest_date = ''
if 'proposedAdministrationTimeInterval' in substanceAdministrationProposal:
rec_date = RE_YYYYMMDD.findall(substanceAdministrationProposal['proposedAdministrationTimeInterval']['@low'])[0]
if '@high' in substanceAdministrationProposal['proposedAdministrationTimeInterval']:
pastdue_date = RE_YYYYMMDD.findall(substanceAdministrationProposal['proposedAdministrationTimeInterval']['@high'])[0]
if 'validAdministrationTimeInterval' in substanceAdministrationProposal:
earliest_date = RE_YYYYMMDD.findall(substanceAdministrationProposal['validAdministrationTimeInterval']['@low'])[0]
recommendation_list.append([vaccine_group, forecast_concept, forecast_interpretation, rec_date, vaccine_group_code, substance_code, earliest_date, pastdue_date])
return (evaluation_list, recommendation_list) | function | python | 99,715 |
public InteractionResultHolder<ItemStack> use(Level pLevel, Player pPlayer, InteractionHand pHand) {
ItemStack itemstack = pPlayer.getItemInHand(pHand);
if (pLevel.isClientSide) {
return InteractionResultHolder.success(itemstack);
} else {
if (!pPlayer.getAbilities().instabuild) {
itemstack.shrink(1);
}
pPlayer.awardStat(Stats.ITEM_USED.get(this));
pPlayer.level.playSound((Player)null, pPlayer, SoundEvents.UI_CARTOGRAPHY_TABLE_TAKE_RESULT, pPlayer.getSoundSource(), 1.0F, 1.0F);
ItemStack itemstack1 = MapItem.create(pLevel, pPlayer.getBlockX(), pPlayer.getBlockZ(), (byte)0, true, false);
if (itemstack.isEmpty()) {
return InteractionResultHolder.consume(itemstack1);
} else {
if (!pPlayer.getInventory().add(itemstack1.copy())) {
pPlayer.drop(itemstack1, false);
}
return InteractionResultHolder.consume(itemstack);
}
}
} | function | java | 99,716 |
void
ClSimplexSolver::Reset()
{
#ifdef CL_TRACE
Tracer TRACER(__FUNCTION__);
cerr << "()" << endl;
#endif
assert(false);
} | function | c++ | 99,717 |
def importance_gpomdp(states, rewards, gradients, p_tar, p_gen,
use_baseline=True, use_natural_grad=True,
fisher_diag=False, ret_diagnostics=False,
sum_args=None, sampling_args=None):
res = _create_episode_info()
for rs, gs, ps, qs in izip(rewards, gradients, p_tar, p_gen):
traj_p = np.sum(ps)
traj_q = np.sum(qs)
sum_grads = np.cumsum(gs, axis=0)
traj_rs = np.asarray(rs)
r_grad = np.sum((traj_rs * sum_grads.T).T, axis=0)
res.r_grads.append(r_grad)
res.traj_p_tar.append(traj_p)
res.traj_p_gen.append(traj_q)
res.traj_grads.append(sum_grads[-1])
res.traj_r.append(np.sum(rs))
res.act_grads.extend(gs)
res.state_act_p_tar.extend(np.cumsum(ps))
res.state_act_p_gen.extend(np.cumsum(qs))
return _importance_policy_gradient(res=res,
use_baseline=use_baseline,
use_natural_grad=use_natural_grad,
fisher_diag=fisher_diag,
ret_diagnostics=ret_diagnostics,
sampling_args=sampling_args) | function | python | 99,718 |
void clif_party_show_picker( struct map_session_data* sd, struct item* item_data ){
#if PACKETVER >= 20071002
nullpo_retv( sd );
nullpo_retv( item_data );
struct item_data* id = itemdb_search( item_data->nameid );
struct PACKET_ZC_ITEM_PICKUP_PARTY p;
p.packetType = HEADER_ZC_ITEM_PICKUP_PARTY;
p.AID = sd->status.account_id;
p.itemId = client_nameid( item_data->nameid );
p.identified = item_data->identify;
p.damaged = item_data->attribute;
p.refine = item_data->refine;
clif_addcards( &p.slot, item_data );
p.location = id->equip;
p.itemType = itemtype( id->nameid );
#if PACKETVER_MAIN_NUM >= 20200916 || PACKETVER_RE_NUM >= 20200724
p.enchantgrade = item_data->enchantgrade;
#endif
clif_send( &p, sizeof( p ), &sd->bl, PARTY_SAMEMAP_WOS );
#endif
} | function | c++ | 99,719 |
def build_solution(best_path, current_node):
action_list = []
while current_node.get_parent() is not None:
current_state = current_node.get_state()
action_list.append(current_node.get_action())
current_node = best_path[current_state]
# Python's list.reverse() reverses in-place.
action_list.reverse()
return action_list | function | python | 99,720 |
public void resetPopulation()
{
int i, j;
_is_inactive = true;
for(i=0; i<_population.length; i++)
{
for(j=0; j<_len_individual; j++)
{
_population [i][j] = 0;
_end_individual[i][j] = 0;
}
for(j=0; j<_num_out; j++)
{
_sub_output[i][j] = 0;
}
_sub_output_history[i].resetHistory();
}
} | function | java | 99,721 |
public class AgedFileInfo : IComparable<AgedFileInfo>
{
#region Public Members
public AgedFileInfo ( )
{
}
public AgedFileInfo ( FileInfo pfileInfo )
{
_fileInfo = pfileInfo;
}
public FileInfo Details
{
get
{
return _fileInfo;
}
}
#endregion
#region Private Members
FileInfo _fileInfo = null;
#endregion
#region Overridden Methods of Base Class (Object) - All of them
public override string ToString ( )
{
return string.Format (
Properties.Resources.AGED_FILES_INFO_TOSTRING_TEMPLATE ,
new string [ ]
{
this.GetType ( ).FullName,
_fileInfo.Name ,
SysDateFormatters.FormatDateTimeForShow (
_fileInfo.LastAccessTimeUtc ) ,
NumberFormatters.Integer (
_fileInfo.LastAccessTimeUtc.Ticks ) ,
SysDateFormatters.FormatDateTimeForShow (
_fileInfo.LastAccessTime ) ,
NumberFormatters.Integer (
_fileInfo.LastAccessTime.Ticks )
} );
}
public override bool Equals ( object obj )
{
if ( obj.GetType ( ) == this.GetType ( ) )
{
AgedFileInfo afiTheOther = ( AgedFileInfo ) obj;
return _fileInfo.LastAccessTimeUtc.Ticks.Equals ( afiTheOther._fileInfo.LastAccessTimeUtc.Ticks );
}
else
{
return false;
}
}
public override int GetHashCode ( )
{
return _fileInfo.LastAccessTimeUtc.Ticks.GetHashCode ( );
}
#endregion
#region IComparable<AgedFileInfo> Members
int IComparable<AgedFileInfo>.CompareTo ( AgedFileInfo other )
{
if ( other != null )
{
return -( this._fileInfo.LastWriteTimeUtc.Ticks.CompareTo ( other._fileInfo.LastWriteTimeUtc.Ticks ) );
}
else
{
return -( this._fileInfo.LastWriteTimeUtc.Ticks.CompareTo ( DateTime.MinValue.Ticks ) );
}
}
#endregion
} | class | c# | 99,722 |
public Map<String, Double> copyObservables(boolean assignedOnly) {
Map<String, Double> copy = new HashMap<String, Double>();
synchronized (parameterValues) {
for (IObservable observable : parameterValues.keySet()) {
if (!observable.isInternal()) {
if (!assignedOnly || (assignedOnly && hasValue(observable))) {
copy.put(observable.name(), getObservedValue(observable));
}
}
}
}
return copy;
} | function | java | 99,723 |
private void colourClient(final String network, final GroupChatUser client) {
final StringConverter sc = client.getUser().getConnection().getParser().get()
.getStringConverter();
final User myself = client.getUser().getConnection().getLocalUser().orElse(null);
final String nickOption1 = sc.toLowerCase(network + ':' + client.getNickname());
final String nickOption2 = sc.toLowerCase("*:" + client.getNickname());
if (useowncolour && client.getUser().equals(myself)) {
final Colour color = colourManager.getColourFromString(owncolour, null);
putColour(client, color);
} else if (userandomcolour) {
putColour(client, getColour(client.getNickname()));
}
Color color = null;
if (nickColours.containsKey(nickOption1)) {
color = nickColours.get(nickOption1);
} else if (nickColours.containsKey(nickOption2)) {
color = nickColours.get(nickOption2);
}
if (color != null) {
putColour(client, NickColourUtils.getColourfromColor(color));
}
} | function | java | 99,724 |
public class Provider : IHttpHandler, IRequiresSessionState {
public bool IsReusable {
get { return true; }
}
public void ProcessRequest(HttpContext context) {
IRequest request = ProviderEndpoint.Provider.GetRequest();
if (request != null) {
if (!request.IsResponseReady) {
ProviderEndpoint.PendingAnonymousRequest = request as IAnonymousRequest;
ProviderEndpoint.PendingAuthenticationRequest = request as IAuthenticationRequest;
if (ProviderEndpoint.PendingAuthenticationRequest != null) {
Code.Util.ProcessAuthenticationChallenge(ProviderEndpoint.PendingAuthenticationRequest);
} else if (ProviderEndpoint.PendingAnonymousRequest != null) {
Code.Util.ProcessAnonymousRequest(ProviderEndpoint.PendingAnonymousRequest);
}
}
if (request.IsResponseReady) {
ProviderEndpoint.Provider.SendResponse(request);
ProviderEndpoint.PendingRequest = null;
}
}
}
} | class | c# | 99,725 |
def _train_step(self, obs, actions, rewards, dones, mus, states, masks, steps, writer=None):
cur_lr = self.learning_rate_schedule.value_steps(steps)
td_map = {self.train_model.obs_ph: obs, self.polyak_model.obs_ph: obs, self.action_ph: actions,
self.reward_ph: rewards, self.done_ph: dones, self.mu_ph: mus, self.learning_rate_ph: cur_lr}
if states is not None:
td_map[self.train_model.states_ph] = states
td_map[self.train_model.dones_ph] = masks
td_map[self.polyak_model.states_ph] = states
td_map[self.polyak_model.dones_ph] = masks
if writer is not None:
if self.full_tensorboard_log and (1 + (steps / self.n_batch)) % 10 == 0:
run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
step_return = self.sess.run([self.summary] + self.run_ops, td_map, options=run_options,
run_metadata=run_metadata)
writer.add_run_metadata(run_metadata, 'step%d' % steps)
else:
step_return = self.sess.run([self.summary] + self.run_ops, td_map)
writer.add_summary(step_return[0], steps)
step_return = step_return[1:]
else:
step_return = self.sess.run(self.run_ops, td_map)
return self.names_ops, step_return[1:] | function | python | 99,726 |
def drifter_loop(p):
if p < 13:
return None
from pulp import LpProblem, LpVariable, PULP_CBC_CMD
q = p if p%2 else p//2 if p%4 else p//4
lp = LpProblem()
a = LpVariable("a", cat="Integer")
b = LpVariable("b", cat="Integer")
lp += (6*q*b - 23*a >= 1, "x")
lp += (50*a - 13*q*b >= 1, "y")
lp += (71*(6*q*b - 23*a) + 36*(50*a - 13*q*b), "population")
lp.solve(PULP_CBC_CMD(msg=False))
sols = {v.name: int(v.varValue) for v in lp.variables()}
a, b = sols["a"], sols["b"]
x = 6*q*b - 23*a - 1
y = 50*a - 13*q*b - 1
looplen = 200*(x+1) + 92*(y+1)
pat = dl1(0,0)
cx, cy = -20, 15
for _ in range(x):
pat += dl2(cx,cy)
cx -= 29
cy += 29
pat += dl3(cx,cy)
cx += 15
cy += 20
for _ in range(y):
pat += dl4(cx,cy)
cx += 24
pat += pat("rot180", cx, cy)
for _ in range(looplen // p):
pat[0,0] = 1
pat = pat[p]
return (pat, None) | function | python | 99,727 |
public static ContentEncoding resolveEncoding(final HttpServletRequest request) {
String acceptEncodingHeader = request.getHeader("Accept-Encoding");
if (acceptEncodingHeader == null) {
return RAW;
}
String[] encodings = acceptEncodingHeader.split(",");
List<String> encodingList = Arrays.asList(encodings);
if (encodingList.contains(GZIP.getHeaderValue())) {
return GZIP;
}
if (encodingList.contains(DEFLATE.getHeaderValue())) {
return DEFLATE;
}
return RAW;
} | function | java | 99,728 |
def convert_imperial_str(value, to_unit):
if "''" in value:
split_on_inches = value.strip().split("''")
split_on_inches = list(filter(lambda x: x != '', split_on_inches))
if len(split_on_inches[0].split("'")) == 2:
feet = float(split_on_inches[0].split("'")[0])
elif len(split_on_inches[0].split("'")) == 1:
feet = 0.
else:
raise ValueError
if "'" in split_on_inches[0]:
inches_int = int(split_on_inches[0].split("'")[1])
else:
inches_int = int(split_on_inches[0])
if len(split_on_inches) == 2 and split_on_inches[1] != "":
inches_frac = float(Fraction(split_on_inches[1]))
else:
inches_frac = 0.
inches_float = inches_int + inches_frac
else:
feet = float(value[0:-1])
inches_float = 0.
return convert_imperial(feet=feet, inches=inches_float, to_unit=to_unit) | function | python | 99,729 |
@NonNull
public static File workspacePathToFile(@NonNull IPath path) {
IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
IResource res = root.findMember(path);
if (res != null) {
IPath location = res.getLocation();
if (location != null) {
return location.toFile();
}
return root.getLocation().append(path).toFile();
}
return path.toFile();
} | function | java | 99,730 |
static FIBITMAP* LogLuminance(FreeImageMem* mem, FIBITMAP *Y) {
FIBITMAP *H = NULL;
try {
H = FreeImage_Clone(mem, Y); // assign the outer H so the catch block can release it on failure
if(!H) throw(1);
const unsigned width = FreeImage_GetWidth(H);
const unsigned height = FreeImage_GetHeight(H);
const unsigned pitch = FreeImage_GetPitch(H);
float maxLum = -1e20F, minLum = 1e20F;
BYTE *bits = (BYTE*)FreeImage_GetBits(H);
for(unsigned y = 0; y < height; y++) {
const float *pixel = (float*)bits;
for(unsigned x = 0; x < width; x++) {
const float value = pixel[x];
maxLum = (maxLum < value) ? value : maxLum;
minLum = (minLum < value) ? minLum : value;
}
bits += pitch;
}
if(maxLum == minLum) throw(1);
const float scale = 100.F / (maxLum - minLum);
bits = (BYTE*)FreeImage_GetBits(H);
for(unsigned y = 0; y < height; y++) {
float *pixel = (float*)bits;
for(unsigned x = 0; x < width; x++) {
const float value = (pixel[x] - minLum) * scale;
pixel[x] = log(value + EPSILON);
}
bits += pitch;
}
return H;
} catch(int) {
if(H) FreeImage_Unload(mem, H);
return NULL;
}
} | function | c++ | 99,731 |
public class UdpServer implements Runnable {
public static final int DEFAULT_PORT = 9876;
private DatagramSocket serverSocket;
private boolean runServer = true;
// TODO configuration
private ExecutorService executorService = Executors.newFixedThreadPool(10);
private MessageProcessor processor;
private final static Logger logger = Logger.getLogger(UdpServer.class.getName());
public UdpServer(MessageProcessor processor) {
this.processor = processor;
}
public void start() throws Exception {
serverSocket = new DatagramSocket(DEFAULT_PORT);
byte[] receiveData = new byte[1024];
while(runServer) {
DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
serverSocket.receive(receivePacket);
final String msg = new String(receivePacket.getData());
executorService.submit(new Runnable() {
@Override
public void run() {
processor.processMessage(msg);
}
});
// TODO support this
// InetAddress IPAddress = receivePacket.getAddress();
}
}
public void stop() {
runServer = false;
executorService.shutdown();
serverSocket.close();
}
@Override
public void run() {
try {
start();
} catch(Throwable e) {
logger.log(Level.SEVERE, "server didn't start properly", e);
}
}
} | class | java | 99,732 |
tree
maybe_push_decl (tree decl)
{
tree type = TREE_TYPE (decl);
if (decl == error_mark_node
|| (TREE_CODE (decl) != PARM_DECL
&& DECL_CONTEXT (decl) != NULL_TREE
&& TREE_CODE (DECL_CONTEXT (decl)) != NAMESPACE_DECL)
|| (TREE_CODE (decl) == TEMPLATE_DECL && !namespace_bindings_p ())
|| TREE_CODE (type) == UNKNOWN_TYPE
|| (TREE_CODE (decl) == FUNCTION_DECL
&& DECL_TEMPLATE_SPECIALIZATION (decl)))
return decl;
else
return pushdecl (decl);
} | function | c | 99,733 |
def read_met_data(hdf5_file, met_data_node, indices_dict, year, month, day,
days_in_month):
import numpy.ma as ma, h5py
h5file = h5py.File(hdf5_file, mode = "r")
xmin, xmax = indices_dict['xstart_5km'], indices_dict['xend_5km']
ymin, ymax = indices_dict['ystart_5km'], indices_dict['yend_5km']
grid_list = []
var_list = ['rainfall', 'min_temp', 'max_temp', 'et']
for variable in var_list:
if variable == 'rainfall':
dset_path = (r'%sdaily/rainfall/rainfall_%s/rainfall_%s_%02d_%02d'
% (met_data_node, year, year, month, day))
elif variable == 'min_temp':
dset_path = (r'%sdaily/min_temp/min_temp_%s/min_temp_%s_%02d_%02d'
% (met_data_node, year, year, month, day))
elif variable == 'max_temp':
dset_path = (r'%sdaily/max_temp/max_temp_%s/max_temp_%s_%02d_%02d'
% (met_data_node, year, year, month, day))
elif (variable=='et') and ((year<1969) or (year>2005)):
dset_path = (r'%smonthly/est_pm_pet/est_pm_pet_%s_%02d'
% (met_data_node, year, month))
else:
dset_path = (r'%smonthly/pm_pet/pm_pet_%s_%02d'
% (met_data_node, year, month))
dset = h5file[dset_path]
array = dset[:].astype(float)
array = ma.masked_equal(array, float(dset.attrs['No_Data_Value']))
array = array[ymin:ymax, xmin:xmax]
if variable == 'et':
array = array / days_in_month
array = 1.0*rebin(array, 5)
grid_list.append(array)
h5file.close()
return grid_list | function | python | 99,734 |
public static IList<TOut> MapObjectsList<TIn, TOut>(IEnumerable<TIn> source) where TIn : class where TOut : class
{
if (source == null)
return null;
var destination = new List<TOut>();
var sourcetype = typeof(TIn);
var destinationtype = typeof(TOut);
var commonproperties = GetCommonproperties(sourcetype, destinationtype);
var enumerable = source as TIn[] ?? source.ToArray();
var count = enumerable.Length;
for (var i = 0; i < count; i++)
{
var input = enumerable[i];
var output = Activator.CreateInstance<TOut>();
foreach (var match in commonproperties)
{
match.Dp.SetValue(output, match.Sp.GetValue(input, null), null);
}
destination.Add(output);
}
return destination;
} | function | c# | 99,735 |
public static Money GetPresentValue(Swap swap, string baseParty)
{
var list = new List<Money>();
foreach (InterestRateStream stream in swap.swapStream)
{
Money presentValueOfStream = CashflowsHelper.GetPresentValue(stream.cashflows);
list.AddRange(GetValue(stream.payerPartyReference.href, stream.receiverPartyReference.href, baseParty, presentValueOfStream));
}
Money sumPVs = MoneyHelper.Sum(list);
Money presentValueOfAdditionalPayments = GetPresentValueOfAdditionalPayments(swap, baseParty, sumPVs.currency);
sumPVs = MoneyHelper.Add(sumPVs, presentValueOfAdditionalPayments);
return sumPVs;
} | function | c# | 99,736 |
func Anagrams(word string) []string {
if len(word) <= 1 {
return []string{word}
}
output := []string{}
var letter byte
var pre []byte
var post []byte
var joined []byte
for x := 0; x < len(word); x++ {
workingWord := make([]byte, len(word))
copy(workingWord, []byte(word))
letter = workingWord[x]
pre = workingWord[0:x]
post = workingWord[x+1 : len(word)]
joined = append(pre, post...)
for _, subResult := range Anagrams(string(joined)) {
output = append(output, string(letter)+subResult)
}
}
return output
} | function | go | 99,737 |
void * find_saved_temp_mem(void *hob_list_ptr)
{
EFI_GUID temp_hob_guid = FSP_BOOTLOADER_TEMPORARY_MEMORY_HOB_GUID;
EFI_HOB_GUID_TYPE *saved_mem_hob =
(EFI_HOB_GUID_TYPE *) find_hob_by_guid(
hob_list_ptr, &temp_hob_guid);
if (saved_mem_hob == NULL)
return NULL;
return (void *) ((char *) saved_mem_hob + sizeof(EFI_HOB_GUID_TYPE));
} | function | c | 99,738 |
class SimpleLoginAlgorithm:
"""Class holding the algorithm to carry out a simple login.
Attributes:
base_url (str): API base URL, normally ending '/OrbitAPI'
data_view (str): DataView being logged into
api_client (aa.ApiClient): API client used to log in
session_id (str): Apteco session ID for the created session
access_token (str): access token for the created session
user (User): API user
Methods:
run(): entry point to run the algorithm
"""
def __init__(self, base_url: str, data_view: str):
"""
Args:
base_url (str): API base URL, normally ending '/OrbitAPI'
data_view (str): DataView being logged into
"""
self.base_url = base_url
self.data_view = data_view
def run(self, user: str, password: str) -> Credentials:
"""Run the algorithm with the given login credentials.
Args:
user (str): username of API user
password (str): password for this user
Returns:
Credentials: API session credentials
"""
self._create_unauthorized_client()
self._simple_login(user, password)
self._create_credentials()
return self.credentials
def _create_unauthorized_client(self):
"""Create an unauthorized API client."""
config = aa.Configuration()
config.host = self.base_url
self._config = config
self.api_client = aa.ApiClient(configuration=self._config)
def _simple_login(self, user, password):
"""Call API to perform simple login."""
sessions_controller = aa.SessionsApi(self.api_client)
login_response = sessions_controller.sessions_create_session_simple(
self.data_view, user, password
)
self.session_id = login_response.session_id
self.access_token = login_response.access_token
self.user = User(
username=login_response.user.username,
first_name=login_response.user.firstname,
surname=login_response.user.surname,
email_address=login_response.user.email_address,
)
def _create_credentials(self):
"""Initialize session credentials object."""
self.credentials = Credentials(
self.base_url, self.data_view, self.session_id, self.access_token, self.user
) | class | python | 99,739 |
def custom_evaluation(
self,
teacher_action: Message,
labels: Optional[Tuple[str]],
model_response: Message,
) -> None:
pass | function | python | 99,740 |
def _tag_psflets(shape, x, y, good, dx=10, dy=10):
psflet_indx = np.zeros(shape, np.int)
oldshape = x.shape
x_int = (np.reshape(x + 0.5, -1)).astype(int)
y_int = (np.reshape(y + 0.5, -1)).astype(int)
good = np.reshape(good, -1)
x = np.reshape(x, -1)
y = np.reshape(y, -1)
x_i = np.arange(shape[1])
y_i = np.arange(shape[0])
x_i, y_i = np.meshgrid(x_i, y_i)
mindist = np.ones(shape) * 1e10
for i in range(x_int.shape[0]):
if good[i]:
iy1, iy2 = [y_int[i] - dy, y_int[i] + dy + 1]
ix1, ix2 = [x_int[i] - dx, x_int[i] + dx + 1]
dist = (y[i] - y_i[iy1:iy2, ix1:ix2])**2
dist += (x[i] - x_i[iy1:iy2, ix1:ix2])**2
indx = np.where(dist < mindist[iy1:iy2, ix1:ix2])
psflet_indx[iy1:iy2, ix1:ix2][indx] = i
mindist[iy1:iy2, ix1:ix2][indx] = dist[indx]
good = np.reshape(good, oldshape)
x = np.reshape(x, oldshape)
y = np.reshape(y, oldshape)
return psflet_indx | function | python | 99,741 |
public abstract class Problem
{
public abstract string getLabel();
public virtual string getFixDescription() { return "No fix or suggestion description available."; }
public virtual bool canAutoFix() { return GetType().GetMethod("autoFix").DeclaringType != typeof(Problem); }
public virtual void autoFix() { }
public virtual void select() { }
} | class | c# | 99,742 |
def change_dtype(input_filename,output_filename,input_fieldname,
output_fieldname,new_dtype,grid_type,**grid_kwargs):
field = iodriver.load_field(input_filename,
file_type=\
iodriver.get_file_extension(input_filename),
field_type='Generic',
fieldname=input_fieldname,
unmask=False,
timeslice=None,
grid_type=grid_type,
**grid_kwargs)
field.change_dtype(new_dtype)
iodriver.write_field(output_filename,field,
file_type=iodriver.get_file_extension(output_filename),
fieldname=output_fieldname) | function | python | 99,743 |
@Test
public void listLicensesCheckIndividualLicense() throws IOException {
PagedIterable<GHLicense> licenses = gitHub.listLicenses();
for (GHLicense lic : licenses) {
if (lic.getKey().equals("mit")) {
assertThat(lic.getUrl(), equalTo(new URL(mockGitHub.apiServer().baseUrl() + "/licenses/mit")));
return;
}
}
fail("The MIT license was not found");
} | function | java | 99,744 |
@XmlRootElement
@Entity
public class OAuthPermission extends Permission {
private static final long serialVersionUID = -6486616235830491290L;
private List<String> httpVerbs = new LinkedList<>();
private List<String> uris = new LinkedList<>();
public OAuthPermission() {
}
public OAuthPermission(String permission) {
this(permission, null);
}
public OAuthPermission(String permission, String description) {
super(permission, description);
}
/**
* Sets the optional list of HTTP verbs, for example
* "GET" and "POST", etc.
*
* @param httpVerbs the list of HTTP verbs
*/
public void setHttpVerbs(List<String> httpVerbs) {
this.httpVerbs = httpVerbs;
}
/**
* Gets the optional list of HTTP verbs
*
* @return the list of HTTP verbs
*/
@ElementCollection
public List<String> getHttpVerbs() {
return httpVerbs;
}
/**
* Sets the optional list of relative request URIs
*
* @param uri the list of URIs
*/
public void setUris(List<String> uri) {
this.uris = uri;
}
/**
* Gets the optional list of relative request URIs
*
* @return the list of URIs
*/
@ElementCollection
public List<String> getUris() {
return uris;
}
@Override
public boolean equals(Object object) {
if (!(object instanceof OAuthPermission) || !super.equals(object)) {
return false;
}
OAuthPermission that = (OAuthPermission) object;
if (getHttpVerbs() != null && that.getHttpVerbs() == null
|| getHttpVerbs() == null && that.getHttpVerbs() != null
|| getHttpVerbs() != null && !getHttpVerbs().equals(that.getHttpVerbs())) {
return false;
}
if (getUris() != null && that.getUris() == null // NOPMD
|| getUris() == null && that.getUris() != null // NOPMD
|| getUris() != null && !getUris().equals(that.getUris())) { // NOPMD
return false;
}
return true;
}
@Override
public int hashCode() {
int hashCode = super.hashCode();
if (getHttpVerbs() != null) {
hashCode = 31 * hashCode + getHttpVerbs().hashCode();
}
if (getUris() != null) {
hashCode = 31 * hashCode + getUris().hashCode();
}
return hashCode;
}
} | class | java | 99,745 |
private void OnCastleDestroyed(Unit destroyedCastle)
{
if (destroyedCastle == _allyCastle)
{
Debug.Log("Enemy win");
string winner = MatchCommunicationManager.Instance.OpponentId;
string loser = NakamaSessionManager.Instance.Session.UserId;
string matchId = MatchCommunicationManager.Instance.MatchId;
float matchDuration = Time.unscaledTime - _timerStart;
int winnerTowersDestroyed = 1 + _allyTowers.Count(x => x.IsDestroyed == true);
int loserTowersDestroyed = 0 + _enemyTowers.Count(x => x.IsDestroyed == true);
MatchMessageGameEnded message = new MatchMessageGameEnded(winner, loser, matchId, winnerTowersDestroyed, loserTowersDestroyed, matchDuration);
MatchCommunicationManager.Instance.SendMatchStateMessage(MatchMessageType.MatchEnded, message);
MatchCommunicationManager.Instance.SendMatchStateMessageSelf(MatchMessageType.MatchEnded, message);
}
else
{
Debug.Log("Ally win");
string winner = NakamaSessionManager.Instance.Session.UserId;
string loser = MatchCommunicationManager.Instance.OpponentId;
string matchId = MatchCommunicationManager.Instance.MatchId;
float matchDuration = Time.unscaledTime - _timerStart;
int winnerTowersDestroyed = 1 + _enemyTowers.Count(x => x.IsDestroyed == true);
int loserTowersDestroyed = 0 + _allyTowers.Count(x => x.IsDestroyed == true);
MatchMessageGameEnded message = new MatchMessageGameEnded(winner, loser, matchId, winnerTowersDestroyed, loserTowersDestroyed, matchDuration);
MatchCommunicationManager.Instance.SendMatchStateMessage(MatchMessageType.MatchEnded, message);
MatchCommunicationManager.Instance.SendMatchStateMessageSelf(MatchMessageType.MatchEnded, message);
}
} | function | c# | 99,746 |
function Level(levelNumber, tileDelta, parent) {
this.levelNumber = levelNumber;
this.tileDelta = tileDelta;
this.parent = parent;
this.texelSize = tileDelta.latitude * Angle.DEGREES_TO_RADIANS / parent.tileHeight;
this.tileWidth = parent.tileWidth;
this.tileHeight = parent.tileHeight;
this.sector = parent.sector;
} | function | javascript | 99,747 |
class Solution {
public:
vector<int> arrayRankTransform(vector<int>& arr) {
vector<int> rank(arr.size());
vector<pair<int, int>> arr_idx;
for(int i = 0; i < arr.size(); i++)
arr_idx.emplace_back(make_pair(arr[i], i));
sort(arr_idx.begin(), arr_idx.end(),
[](pair<int, int>& a, pair<int, int>& b)->bool{
return a.first < b.first;
});
int r = 1;
for(int i = 0; i < arr_idx.size(); i++) {
rank[arr_idx[i].second] = r;
if(i+1 < arr_idx.size() && arr_idx[i].first != arr_idx[i+1].first)
++r;
}
return rank;
}
} | class | c++ | 99,748 |
public static void exclusive(final JCheckBox first, final boolean firstState, final JCheckBox second, final boolean secondState) {
ActionListener l = new ActionListener() {
/**
* One way check for the condition
* @param checked the first to check
* @param checkedState the state to match
* @param changed the changed control
* @param impliedState the implied state
*/
private void check(final JCheckBox checked, final boolean checkedState, final JCheckBox changed, final boolean impliedState) {
if (checked.isSelected() == checkedState) {
changed.setSelected(impliedState);
changed.setEnabled(false);
}
else {
changed.setEnabled(true);
}
}
/**
* {@inheritDoc}
*/
@Override
public void actionPerformed(ActionEvent e) {
check(first, firstState, second, !secondState);
check(second, secondState, first, !firstState);
}
};
first.addActionListener(l);
second.addActionListener(l);
l.actionPerformed(null);
} | function | java | 99,749 |
@MustBeInvokedByOverriders
protected void initAdapter() {
if (initializedAdapter) {
throw new IllegalStateException("adapter already initialized");
}
adapter.initAdapter();
this.initializedAdapter = true;
this.adapterInitialized();
observer.onNext(new AdapterInitializedEvent(this));
} | function | java | 99,750 |
def xml2dict(domnode, go_deep=False, drop=tuple()):
if not go_deep and hasattr(domnode,'getAttribute') \
and domnode.getAttribute('id'):
return [ domnode.getAttribute('id') ]
elif domnode.childNodes.length == 1 and domnode.firstChild.nodeType==3:
return domnode.firstChild.data
elif domnode.childNodes.length == 0:
return None
else:
return KeyedArray([( n.tagName, xml2dict(n, drop=drop) )
for n in domnode.childNodes
if (n.nodeType==1 and n.tagName not in drop)]) | function | python | 99,751 |
func TestUndeployRouter(t *testing.T) {
routerName := "e2e-experiment-" + globalTestContext.TestID
t.Log(fmt.Sprintf("Retrieving router with name '%s' created from previous test step", routerName))
existingRouter, err := getRouterByName(
globalTestContext.httpClient, globalTestContext.APIBasePath, globalTestContext.ProjectID, routerName)
require.NoError(t, err)
t.Log("Undeploying router")
url := fmt.Sprintf("%s/projects/%d/routers/%d/undeploy", globalTestContext.APIBasePath,
globalTestContext.ProjectID, existingRouter.ID)
req, err := http.NewRequestWithContext(context.Background(), http.MethodPost, url, nil)
require.NoError(t, err)
response, err := globalTestContext.httpClient.Do(req)
require.NoError(t, err)
assert.Equal(t, http.StatusOK, response.StatusCode)
responseBody, err := ioutil.ReadAll(response.Body)
defer response.Body.Close()
require.NoError(t, err)
t.Log("Undeploy Response:", string(responseBody))
t.Log("Wait Undeploy router")
time.Sleep(time.Second * deleteTimeoutSeconds)
t.Log("Testing GET router")
router, err := getRouter(
globalTestContext.httpClient,
globalTestContext.APIBasePath,
globalTestContext.ProjectID,
int(existingRouter.ID),
)
require.NoError(t, err)
require.NotNil(t, router.CurrRouterVersion)
assert.Equal(t, 1, int(router.CurrRouterVersion.Version))
assert.Equal(t, models.RouterVersionStatusUndeployed, router.CurrRouterVersion.Status)
assert.Equal(t, models.RouterStatusUndeployed, router.Status)
assert.Equal(t, "", router.Endpoint)
t.Log("Checking cluster state")
assert.False(t, isDeploymentExists(
globalTestContext.clusterClients,
globalTestContext.ProjectName,
fmt.Sprintf("%s-turing-router-%d-0-deployment", router.Name, 1)))
assert.False(t, isDeploymentExists(
globalTestContext.clusterClients,
globalTestContext.ProjectName,
fmt.Sprintf("%s-turing-enricher-%d-0-deployment", router.Name, 1)))
assert.False(t, isDeploymentExists(
globalTestContext.clusterClients,
globalTestContext.ProjectName,
fmt.Sprintf("%s-turing-ensembler-%d-0-deployment", router.Name, 1)))
assert.False(t, isConfigMapExists(
globalTestContext.clusterClients,
globalTestContext.ProjectName,
fmt.Sprintf("%s-turing-fiber-config", router.Name)))
_, err = getRouterDownstream(globalTestContext.clusterClients,
globalTestContext.ProjectName,
fmt.Sprintf("%s-turing-router", router.Name))
assert.Equal(t, true, err != nil)
} | function | go | 99,752 |
private void forwardMsg(String msg, User excludedUser) throws IOException {
Set<User> keySet = connectedUsers.keySet();
int count = 0;
for (User user : keySet) {
if (user.equals(excludedUser)) {
continue;
}
SocketChannel socketChannel = connectedUsers.get(user);
ChannelHandler.sendMsg(socketChannel, excludedUser.getUsername() + ": " + msg);
count++;
}
SocketChannel socketChannel = connectedUsers.get(excludedUser);
ChannelHandler.sendMsg(socketChannel, String.format(Constants.MESSAGE_SENT_TO_USERS, count));
} | function | java | 99,753 |
QString GenericChatForm::resolveToxPk(const ToxPk& pk)
{
Friend* f = FriendList::findFriend(pk);
if (f) {
return f->getDisplayedName();
}
for (Group* it : GroupList::getAllGroups()) {
QString res = it->resolveToxId(pk);
if (!res.isEmpty()) {
return res;
}
}
return pk.toString();
} | function | c++ | 99,754 |
def duplicate_nodes(self, nodes):
if not nodes:
return
self._undo_stack.beginMacro('duplicate nodes')
self.clear_selection()
serial = self._serialize(nodes)
new_nodes = self._deserialize(serial)
offset = 50
for n in new_nodes:
x, y = n.pos()
n.set_pos(x + offset, y + offset)
n.set_property('selected', True)
self._undo_stack.endMacro()
return new_nodes | function | python | 99,755 |
def _create_dependency_rule(self, package, dependencies, reason, reason_details=""):
literals = [-package.id]
for dependency in dependencies:
if dependency == package:
return
else:
literals.append(dependency.id)
return PackageRule(self.pool, literals, reason, reason_details, version_factory=package.version_factory) | function | python | 99,756 |
static int exprMightBeIndexed(
SrcList *pFrom,
Bitmask mPrereq,
Expr *pExpr,
int *piCur,
int *piColumn
){
Index *pIdx;
int i;
int iCur;
if( pExpr->op==TK_COLUMN ){
*piCur = pExpr->iTable;
*piColumn = pExpr->iColumn;
return 1;
}
if( mPrereq==0 ) return 0;
if( (mPrereq&(mPrereq-1))!=0 ) return 0;
for(i=0; mPrereq>1; i++, mPrereq>>=1){}
iCur = pFrom->a[i].iCursor;
for(pIdx=pFrom->a[i].pTab->pIndex; pIdx; pIdx=pIdx->pNext){
if( pIdx->aColExpr==0 ) continue;
for(i=0; i<pIdx->nKeyCol; i++){
if( pIdx->aiColumn[i]!=(-2) ) continue;
if( sqlite3ExprCompare(pExpr, pIdx->aColExpr->a[i].pExpr, iCur)==0 ){
*piCur = iCur;
*piColumn = -2;
return 1;
}
}
}
return 0;
} | function | c | 99,757 |
func (ys *YamlSection) Yaml() (string, error) {
s, err := yaml.Marshal(ys.Viper.Sub("data").AllSettings())
if err != nil {
return "", fmt.Errorf("Failed to export yaml: %v", err)
}
return string(s), nil
} | function | go | 99,758 |
def build_loss(config: DictConfig):
assert "name" in config, f"name not provided for loss: {config}"
config = OmegaConf.to_container(config, resolve=True)
name = config["name"]
args = config["init_args"]
if args is not None:
if "weight" in args and args["weight"] is not None:
args["weight"] = torch.tensor(args["weight"], dtype=torch.float)
if name in LOSS_REGISTRY:
instance = LOSS_REGISTRY.get(name)
else:
assert hasattr(torch_losses, name), (
f"{name} isn't a registered loss"
", nor is it available in torch.nn.modules.loss"
)
instance = getattr(torch_losses, name)
if args is not None:
loss = instance(**args)
else:
loss = instance()
_logger.debug("Built loss function: {}".format(loss.__class__.__name__))
return loss | function | python | 99,759 |
public static List<LdapEntry> GetUsers(string domainName, string adminName, string adminPassword)
{
int ldapPort = 389;
string domainNetbios = domainName.Split('.')[0];
string admin = string.Format("{0}\\{1}", domainNetbios.ToUpper(), adminName);
string domainFqn = "DC=" + domainName.Replace(".", ",DC=");
var userSearchBase = domainFqn;
var userSearchFilter = $"(objectClass=user)";
return GetLdapEntryList(domainName, ldapPort, admin, adminPassword, userSearchBase, userSearchFilter);
} | function | c# | 99,760 |
applyClip_(height, width) {
if (this.root_.classList.contains(MaterialMenu.cssClasses_.UNALIGNED)) {
this.list_.style.removeProperty('clip');
} else if (
this.root_.classList.contains(MaterialMenu.cssClasses_.BOTTOM_RIGHT)) {
this.list_.style.setProperty('clip', `rect(0 ${width}px 0 ${width}px)`);
} else if (this.root_.classList.contains(
MaterialMenu.cssClasses_.TOP_LEFT)) {
this.list_.style.setProperty('clip', `rect(${height}px 0 ${height}px 0)`);
} else if (this.root_.classList.contains(
MaterialMenu.cssClasses_.TOP_RIGHT)) {
this.list_.style.setProperty('clip',
`rect(${height}px ${width}px ${height}px ${width}px)`);
} else {
this.list_.style.removeProperty('clip');
}
} | function | javascript | 99,761 |
void Statement::Bind(int64_t inputPosition, const std::string& inputValue)
{
if(sqlite3_bind_text64(compiledStatement,inputPosition, inputValue.c_str(), inputValue.size(), SQLITE_TRANSIENT, SQLITE_UTF8) != SQLITE_OK)
{
throw SOMException("Error binding string to statement at position " + std::to_string(inputPosition) + "\n", SQLITE3_ERROR, __FILE__, __LINE__);
}
} | function | c++ | 99,762 |
func (batchResponse *BatchResponse) GetResponseOf(request *RPCRequest) (*RPCResponse, error) {
if request == nil {
return nil, errors.New("parameter cannot be nil")
}
for _, elem := range batchResponse.rpcResponses {
if elem.ID == request.ID {
return &elem, nil
}
}
return nil, fmt.Errorf("element with id %d not found", request.ID)
} | function | go | 99,763 |
private static String formatBase(TerritoryLog log, @Nullable String serverName) {
String heldForFormatted = FormatUtils.formatReadableTime(log.getTimeDiff() / 1000, false, "s");
return String.format(
"%s: *%s* (%s) → **%s** (%s) [%s]\n" +
" Territory held for %s\n",
log.getTerritoryName(),
log.getOldGuildName(), log.getOldGuildTerrAmt(),
log.getNewGuildName(), log.getNewGuildTerrAmt(),
serverName != null ? serverName : "No war",
heldForFormatted
);
} | function | java | 99,764 |
@Override
public Set<Map.Entry<K, V>> entrySet() {
if (entrySet == null) {
entrySet = new AbstractSet<Map.Entry<K, V>>() {
@Override
public int size() {
return size;
}
@Override
public void clear() {
TreeMap.this.clear();
}
@SuppressWarnings("unchecked")
@Override
public boolean contains(Object object) {
if (object instanceof Map.Entry) {
Map.Entry<K, V> entry = (Map.Entry<K, V>) object;
K key = entry.getKey();
Object v1 = get(key), v2 = entry.getValue();
return v1 == null ? (v2 == null && TreeMap.this.containsKey(key)) : v1.equals(v2);
}
return false;
}
@Override
public Iterator<Map.Entry<K, V>> iterator() {
return new UnboundedEntryIterator<K, V>(TreeMap.this);
}
};
}
return entrySet;
} | function | java | 99,765 |
pub fn from_posix_tz(tz_string: &str) -> Result<Self> {
if tz_string.is_empty() {
return Err(TzError::TzStringError(TzStringError::InvalidTzString("empty TZ string")));
}
if tz_string == "localtime" {
return parse_tz_file(&fs::read("/etc/localtime")?);
}
let read = |mut file: File| -> io::Result<_> {
let mut bytes = Vec::new();
file.read_to_end(&mut bytes)?;
Ok(bytes)
};
let mut chars = tz_string.chars();
if chars.next() == Some(':') {
return parse_tz_file(&read(get_tz_file(chars.as_str())?)?);
}
match get_tz_file(tz_string) {
Ok(file) => parse_tz_file(&read(file)?),
Err(_) => {
let tz_string = tz_string.trim_matches(|c: char| c.is_ascii_whitespace());
// TZ string extensions are not allowed
let rule = parse_posix_tz(tz_string.as_bytes(), false)?;
let local_time_types = match &rule {
TransitionRule::Fixed(local_time_type) => NonEmptyVec::one(local_time_type.clone()),
TransitionRule::Alternate(AlternateTime { std, dst, .. }) => NonEmptyVec { first: std.clone(), tail: vec![dst.clone()] },
};
TimeZone::new(vec![], local_time_types, vec![], Some(rule))
}
}
} | function | rust | 99,766 |
public static string EnsureEndsWith(this string input, string append)
{
if (!input.EndsWith(append))
{
return input + append;
}
return input;
} | function | c# | 99,767 |
def openapi_types():
lazy_import()
return {
'count': (int,),
'next': (str, none_type,),
'previous': (str, none_type,),
'results': ([Fetch],),
} | function | python | 99,768 |
@Category({MiscTests.class, SmallTests.class})
public class TestIPv6NIOServerSocketChannel {
private static final Log LOG = LogFactory.getLog(TestIPv6NIOServerSocketChannel.class);
/**
* Creates and binds a regular ServerSocket.
*/
private void bindServerSocket(InetAddress inetAddr) throws IOException {
while(true) {
int port = HBaseTestingUtility.randomFreePort();
InetSocketAddress addr = new InetSocketAddress(inetAddr, port);
ServerSocket serverSocket = null;
try {
serverSocket = new ServerSocket();
serverSocket.bind(addr);
break;
} catch (BindException ex) {
//continue
} finally {
if (serverSocket != null) {
serverSocket.close();
}
}
}
}
/**
* Creates a NIO ServerSocketChannel, and gets the ServerSocket from
* there. Then binds the obtained socket.
* This fails on Windows with Oracle JDK1.6.0u33, if the passed InetAddress is an
* IPv6 address. Works on Oracle JDK 1.7.
*/
private void bindNIOServerSocket(InetAddress inetAddr) throws IOException {
while (true) {
int port = HBaseTestingUtility.randomFreePort();
InetSocketAddress addr = new InetSocketAddress(inetAddr, port);
ServerSocketChannel channel = null;
ServerSocket serverSocket = null;
try {
channel = ServerSocketChannel.open();
serverSocket = channel.socket();
serverSocket.bind(addr); // This does not work
break;
} catch (BindException ex) {
//continue
} finally {
if (serverSocket != null) {
serverSocket.close();
}
if (channel != null) {
channel.close();
}
}
}
}
/**
* Checks whether we are affected by the JDK issue on Windows, and if so
* ensures that we are running with preferIPv4Stack=true.
*/
@Test
public void testServerSocket() throws IOException {
byte[] addr = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 };
InetAddress inetAddr = InetAddress.getByAddress(addr);
try {
bindServerSocket(inetAddr);
bindNIOServerSocket(inetAddr);
//if on *nix or windows JDK7, both will pass
} catch(java.net.SocketException ex) {
//On Windows JDK6, we will get expected exception:
//java.net.SocketException: Address family not supported by protocol family
//or java.net.SocketException: Protocol family not supported
Assert.assertFalse(ex.getClass().isInstance(BindException.class));
Assert.assertTrue(ex.getMessage().toLowerCase().contains("protocol family"));
LOG.info("Received expected exception:");
LOG.info(ex);
//if this is the case, ensure that we are running on preferIPv4=true
ensurePreferIPv4();
}
}
/**
* Checks whether we are running with java.net.preferIPv4Stack=true
*/
public void ensurePreferIPv4() throws IOException {
InetAddress[] addrs = InetAddress.getAllByName("localhost");
for (InetAddress addr : addrs) {
LOG.info("resolved localhost as:" + addr);
Assert.assertEquals(4, addr.getAddress().length); //ensure 4 byte ipv4 address
}
}
/**
* Tests whether every InetAddress we obtain by resolving can open a
* ServerSocketChannel.
*/
@Test
public void testServerSocketFromLocalhostResolution() throws IOException {
InetAddress[] addrs = InetAddress.getAllByName("localhost");
for (InetAddress addr : addrs) {
LOG.info("resolved localhost as:" + addr);
bindServerSocket(addr);
bindNIOServerSocket(addr);
}
}
public static void main(String[] args) throws Exception {
TestIPv6NIOServerSocketChannel test = new TestIPv6NIOServerSocketChannel();
test.testServerSocket();
test.testServerSocketFromLocalhostResolution();
}
} | class | java | 99,769 |
def _bundle_scans(self):
bases = []
bundles = []
for i in range(len(self.subdirs)):
bs = self.subdirs[i]
flag = True
while flag:
bs1 = os.path.split(bs)[0]
if bs1 == self.base_path.rstrip('/').rstrip('\\'):
flag = False
else:
bs = bs1
if bs not in bases:
bases.append(bs)
matches = [k for (k, name) in enumerate(self.subdirs) if re.findall(bs, name)]
bundles.append([self.subdirs[f] for f in matches])
self.subdirs = bundles | function | python | 99,770 |
@Test
public void testInstantiateWithArray() {
assertEquals(this.child, PA.instantiate(Child.class, new String[]{"Charlie", "Browne"}));
assertEquals(this.child, PA.instantiate(Child.class, new Class[]{String[].class}, new String[]{"Charlie", "Browne"}));
int[] ints = new int[]{1, 2};
child = PA.instantiate(Child.class, ints);
assertEquals(ints, PA.getValue(child, "privateInts"));
child = PA.instantiate(Child.class, new Class[]{int[].class}, ints);
assertEquals(ints, PA.getValue(child, "privateInts"));
} | function | java | 99,771 |
class BeanXmlLoaderTest {
private LoadFromResources loader = new LoadFromResources();
@Test
void testLoadBean() throws BeanXMLException {
testSimpleAndBean("nestedBean");
}
@Test
void testLoadBeanInclude() throws BeanXMLException {
testSimpleAndBean("nestedBeanInclude");
}
/**
* Loads XML that replaces a filepath for an include with a different filepath
*
* @throws BeanXMLException
*/
@Test
void testLoadBeanReplaceAttribute() throws BeanXMLException {
MockBeanNested bean = loader.loadBean("replaceBeanAttribute");
assertEquals("helloChanged", bean.getFieldSimpleNecessary());
}
/**
* Loads XML that replaces a bean with another bean
*
* @throws BeanXMLException
*/
@Test
void testLoadBeanReplaceElement() throws BeanXMLException {
testBean("replaceBeanElement", "world2");
}
/**
* Loads XML that replaces a bean with another bean
*
* @throws BeanXMLException
*/
@Test
void testLoadBeanReplaceInclude() throws BeanXMLException {
testBean("replaceBeanInclude", "worldAlternative");
}
/** A replace bean targeting a missing attribute */
@Test
void testLoadBeanReplaceAttributeMissing() {
assertException(() -> testSimple("replaceBeanAttributeMissing", "helloChanged"));
}
    /** A replace bean targeting a missing attribute element */
@Test
void testLoadBeanReplaceElementMissing() {
assertException(() -> testBean("replaceBeanElementMissing", "world2"));
}
private void testSimple(String fileIdentifier, String expectedFieldValue)
throws BeanXMLException {
MockBeanNested bean = loader.loadBean(fileIdentifier);
assertEquals(expectedFieldValue, bean.getFieldSimpleNecessary());
}
private void testBean(String fileIdentifier, String expectedMessage) throws BeanXMLException {
MockBeanNested bean = loader.loadBean(fileIdentifier);
assertEquals(expectedMessage, bean.getFieldBeanNecessary().getMessage());
}
private void testSimpleAndBean(String fileIdentifier) throws BeanXMLException {
MockBeanNested bean = loader.loadBean(fileIdentifier);
assertEquals("hello", bean.getFieldSimpleNecessary());
assertEquals("world", bean.getFieldBeanNecessary().getMessage());
}
private static void assertException(Executable executable) {
assertThrows(BeanXMLException.class, executable);
}
} | class | java | 99,772 |
@Headers({
"Content-Type:application/json"
})
@POST(API_SUFFIX + "profiles/{profileid}/session/recording")
Call<Void> postRecording(
@Path("profileid") String profileid, @Body SessionRecordingOptions body
); | function | java | 99,773 |
protected void lbDeleteFolder_Click( object sender, EventArgs e )
{
try
{
string selectedPhysicalFolder = GetSelectedPhysicalFolder();
Directory.Delete( selectedPhysicalFolder, true );
string rootFolder = GetRootFolderPath();
string physicalRootFolder = this.MapPath( rootFolder );
string relativeFolder = selectedPhysicalFolder.Replace( physicalRootFolder, string.Empty );
hfSelectedFolder.Value = Path.GetDirectoryName( relativeFolder );
BuildFolderTreeView();
}
catch ( Exception ex )
{
string relativeFolderPath = hfSelectedFolder.Value;
this.ShowErrorMessage( ex, "An error occurred when attempting to delete folder " + relativeFolderPath );
}
} | function | c# | 99,774 |
float FinderPatternFinder::crossCheckVertical(size_t startI, size_t centerJ, int maxCount,
int originalStateCountTotal,
float& estimatedVerticalModuleSize) {
int maxI = image_->getHeight();
int stateCount[5];
for (int i = 0; i < 5; i++) stateCount[i] = 0;
if (!image_->get(centerJ, startI)) {
if ((int)startI + 1 < maxI && image_->get(centerJ, startI + 1))
startI = startI + 1;
else if (0 < (int)startI - 1 && image_->get(centerJ, startI - 1))
startI = startI - 1;
else
return nan();
}
BitMatrix& matrix = *image_;
bool* imageRow0 = matrix.getRowBoolPtr(0);
bool* p = imageRow0;
int imgWidth = matrix.getWidth();
int ii = startI;
p = imageRow0 + ii * imgWidth + centerJ;
while (ii >= 0 && *p) {
stateCount[2]++;
ii--;
p -= imgWidth;
}
if (ii < 0) {
return nan();
}
while (ii >= 0 && !*p && stateCount[1] <= maxCount) {
stateCount[1]++;
ii--;
p -= imgWidth;
}
if (ii < 0 || stateCount[1] > maxCount) {
return nan();
}
CrossCheckState tmpCheckState = FinderPatternFinder::NORMAL;
while (ii >= 0 && *p ) {
stateCount[0]++;
ii--;
p -= imgWidth;
}
if (stateCount[0] >= maxCount) {
tmpCheckState = FinderPatternFinder::LEFT_SPILL;
}
ii = startI + 1;
p = imageRow0 + ii * imgWidth + centerJ;
while (ii < maxI && *p) {
stateCount[2]++;
ii++;
p += imgWidth;
}
if (ii == maxI) {
return nan();
}
while (ii < maxI && !*p && stateCount[3] < maxCount) {
stateCount[3]++;
ii++;
p += imgWidth;
}
if (ii == maxI || stateCount[3] >= maxCount) {
return nan();
}
if (tmpCheckState == FinderPatternFinder::LEFT_SPILL) {
while (ii < maxI && *p && stateCount[4] < maxCount) {
stateCount[4]++;
ii++;
p += imgWidth;
}
if (stateCount[4] >= maxCount) {
return nan();
}
} else {
while (ii < maxI && *p) {
stateCount[4]++;
ii++;
p += imgWidth;
}
if (stateCount[4] >= maxCount) {
tmpCheckState = FinderPatternFinder::RIHGT_SPILL;
}
}
bool vertical_check = foundPatternCross(stateCount);
if (!vertical_check) return nan();
if ((CURRENT_CHECK_STATE == FinderPatternFinder::LEFT_SPILL &&
tmpCheckState == FinderPatternFinder::RIHGT_SPILL) ||
(CURRENT_CHECK_STATE == FinderPatternFinder::RIHGT_SPILL &&
tmpCheckState == FinderPatternFinder::LEFT_SPILL)) {
return nan();
}
int stateCountTotal = getStateCountTotal(stateCount, CURRENT_CHECK_STATE);
if (5 * abs(stateCountTotal - originalStateCountTotal) >= 2 * originalStateCountTotal) {
return nan();
}
estimatedVerticalModuleSize = (float)stateCountTotal / 7.0f;
return centerFromEnd(stateCount, ii);
} | function | c++ | 99,775 |
struct ipmi_request *
ipmi_dequeue_request(struct ipmi_softc *sc)
{
struct ipmi_request *req;
IPMI_LOCK_ASSERT(sc);
while (!sc->ipmi_detaching && TAILQ_EMPTY(&sc->ipmi_pending_requests))
cv_wait(&sc->ipmi_request_added, &sc->ipmi_requests_lock);
if (sc->ipmi_detaching)
return (NULL);
req = TAILQ_FIRST(&sc->ipmi_pending_requests);
TAILQ_REMOVE(&sc->ipmi_pending_requests, req, ir_link);
return (req);
} | function | c | 99,776 |
class EEMD:
"""
**Ensemble Empirical Mode Decomposition**
Ensemble empirical mode decomposition (EEMD) [Wu2009]_
is noise-assisted technique, which is meant to be more robust
than simple Empirical Mode Decomposition (EMD). The robustness is
checked by performing many decompositions on signals slightly
perturbed from their initial position. In the grand average over
all IMF results the noise will cancel each other out and the result
is pure decomposition.
Parameters
----------
trials : int (default: 100)
Number of trials or EMD performance with added noise.
noise_width : float (default: 0.05)
Standard deviation of Gaussian noise (:math:`\hat\sigma`).
It's relative to absolute amplitude of the signal, i.e.
:math:`\hat\sigma = \sigma\cdot|\max(S)-\min(S)|`, where
:math:`\sigma` is noise_width.
ext_EMD : EMD (default: None)
One can pass EMD object defined outside, which will be
used to compute IMF decompositions in each trial. If none
is passed then EMD with default options is used.
parallel : bool (default: False)
Flag whether to use multiprocessing in EEMD execution.
Since each EMD(s+noise) is independent this should improve execution
speed considerably.
*Note* that it's disabled by default because it's the most common
        problem when EEMD takes too long to finish.
If you set the flag to True, make also sure to set `processes` to
some reasonable value.
processes : int or None (optional)
Number of processes harness when executing in parallel mode.
The value should be between 1 and max that depends on your hardware.
separate_trends : bool (default: False)
Flag whether to isolate trends from each EMD decomposition into a separate component.
If `true`, the resulting EEMD will contain ensemble only from IMFs and
the mean residue will be stacked as the last element.
References
----------
.. [Wu2009] Z. Wu and N. E. Huang, "Ensemble empirical mode decomposition:
A noise-assisted data analysis method", Advances in Adaptive
Data Analysis, Vol. 1, No. 1 (2009) 1-41.
"""
logger = logging.getLogger(__name__)
noise_kinds_all = ["normal", "uniform"]
def __init__(self, trials: int = 100, noise_width: float = 0.05, ext_EMD = None, parallel: bool = False, **kwargs):
# Ensemble constants
self.trials = trials
self.noise_width = noise_width
self.separate_trends = bool(kwargs.get('separate_trends', False))
self.random = np.random.RandomState()
self.noise_kind = kwargs.get('noise_kind', 'normal')
self.parallel = parallel
self.processes = kwargs.get('processes') # Optional[int]
if self.processes is not None and not self.parallel:
self.logger.warning("Passed value for process has no effect when `parallel` is False.")
if ext_EMD is None:
from PyEMD import EMD
self.EMD = EMD(**kwargs)
else:
self.EMD = ext_EMD
self.E_IMF = None # Optional[np.ndarray]
self.residue = None # Optional[np.ndarray]
self._all_imfs = {}
def __call__(self, S: np.ndarray, T: Optional[np.ndarray] = None, max_imf: int = -1) -> np.ndarray:
return self.eemd(S, T=T, max_imf=max_imf)
def __getstate__(self) -> Dict:
self_dict = self.__dict__.copy()
if 'pool' in self_dict:
del self_dict['pool']
return self_dict
def generate_noise(self, scale: float, size: Union[int, Sequence[int]]) -> np.ndarray:
"""
Generate noise with specified parameters.
Currently supported distributions are:
* *normal* with std equal scale.
* *uniform* with range [-scale/2, scale/2].
Parameters
----------
scale : float
Width for the distribution.
size : int
Number of generated samples.
Returns
-------
noise : numpy array
Noise sampled from selected distribution.
"""
if self.noise_kind == "normal":
noise = self.random.normal(loc=0, scale=scale, size=size)
elif self.noise_kind == "uniform":
noise = self.random.uniform(low=-scale/2, high=scale/2, size=size)
else:
raise ValueError("Unsupported noise kind. Please assigned `noise_kind` to be one of these: {0}".format(
str(self.noise_kinds_all)))
return noise
def noise_seed(self, seed: int) -> None:
"""Set seed for noise generation."""
self.random.seed(seed)
def eemd(self, S: np.ndarray, T: Optional[np.ndarray] = None, max_imf: int = -1) -> np.ndarray:
"""
Performs EEMD on provided signal.
For a large number of iterations defined by `trials` attr
the method performs :py:meth:`emd` on a signal with added white noise.
Parameters
----------
S : numpy array,
Input signal on which EEMD is performed.
T : numpy array or None, (default: None)
If none passed samples are numerated.
max_imf : int, (default: -1)
Defines up to how many IMFs each decomposition should
be performed. By default (negative value) it decomposes
all IMFs.
Returns
-------
eIMF : numpy array
Set of ensemble IMFs produced from input signal. In general,
these do not have to be, and most likely will not be, same as IMFs
produced using EMD.
"""
if T is None:
T = get_timeline(len(S), S.dtype)
scale = self.noise_width*np.abs(np.max(S)-np.min(S))
self._S = S
self._T = T
self._N = len(S)
self._scale = scale
self.max_imf = max_imf
# For trial number of iterations perform EMD on a signal
# with added white noise
if self.parallel:
pool = Pool(processes=self.processes)
all_IMFs = pool.map(self._trial_update, range(self.trials))
pool.close()
else: # Not parallel
all_IMFs = map(self._trial_update, range(self.trials))
self._all_imfs = defaultdict(list)
for (imfs, trend) in all_IMFs:
# A bit of explanation here.
# If the `trend` is not None, that means it was intentionally separated in the decomp process.
# This might due to `separate_trends` flag which means that trends are summed up separately
# and treated as the last component. Since `proto_eimfs` is a dict, that `-1` is treated literally
# and **not** as the *last position*. We can then use that `-1` to always add it as the last pos
# in the actual eIMF, which indicates the trend.
if trend is not None:
self._all_imfs[-1].append(trend)
for imf_num, imf in enumerate(imfs):
self._all_imfs[imf_num].append(imf)
# Convert defaultdict back to dict and explicitly rename `-1` position to be {the last value} for consistency.
self._all_imfs = dict(self._all_imfs)
if -1 in self._all_imfs:
self._all_imfs[len(self._all_imfs)] = self._all_imfs.pop(-1)
for imf_num in self._all_imfs.keys():
self._all_imfs[imf_num] = np.array(self._all_imfs[imf_num])
self.E_IMF = self.ensemble_mean()
self.residue = S - np.sum(self.E_IMF, axis=0)
return self.E_IMF
def _trial_update(self, trial) -> Tuple[np.ndarray, Optional[np.ndarray]]:
"""A single trial evaluation, i.e. EMD(signal + noise).
*Note*: Although `trial` argument isn't used it's needed for the (multiprocessing) map method.
"""
noise = self.generate_noise(self._scale, self._N)
imfs = self.emd(self._S+noise, self._T, self.max_imf)
trend = None
if self.separate_trends:
imfs, trend = self.EMD.get_imfs_and_trend()
return (imfs, trend)
def emd(self, S: np.ndarray, T: np.ndarray, max_imf: int = -1) -> np.ndarray:
"""Vanilla EMD method.
Provides emd evaluation from provided EMD class.
For reference please see :class:`PyEMD.EMD`.
"""
return self.EMD.emd(S, T, max_imf)
def get_imfs_and_residue(self) -> Tuple[np.ndarray, np.ndarray]:
"""
Provides access to separated imfs and residue from recently analysed signal.
Returns
-------
(imfs, residue) : (np.ndarray, np.ndarray)
Tuple that contains all imfs and a residue (if any).
"""
if self.E_IMF is None or self.residue is None:
raise ValueError('No IMF found. Please, run EMD method or its variant first.')
return self.E_IMF, self.residue
@property
def all_imfs(self):
"""A dictionary with all computed imfs per given order."""
return self._all_imfs
def ensemble_count(self) -> List[int]:
"""Count of imfs observed for given order, e.g. 1st proto-imf, in the whole ensemble."""
return [len(imfs) for imfs in self._all_imfs.values()]
def ensemble_mean(self) -> np.ndarray:
"""Pointwise mean over computed ensemble. Same as the output of `eemd()` method."""
return np.array([imfs.mean(axis=0) for imfs in self._all_imfs.values()])
def ensemble_std(self) -> np.ndarray:
"""Pointwise standard deviation over computed ensemble."""
return np.array([imfs.std(axis=0) for imfs in self._all_imfs.values()]) | class | python | 99,777 |
void Service::removeClient(size_t i) {
assert(i < activeClientCount);
#ifndef GAMEDEVWEBTOOLS_NO_WEBSOCKETS
wsclients[i].~Server();
#endif
clients[i].~Listener();
for(auto j = i+1;j<activeClientCount;++j){
#ifndef GAMEDEVWEBTOOLS_NO_WEBSOCKETS
memcpy(&wsclients[j-1],&wsclients[j],
sizeof(network::websocket::Server));
wsclients[j-1].net = &clients[j-1];
#endif
memcpy(&clients[j-1],&clients[j],sizeof(network::Listener));
}
activeClientCount--;
} | function | c++ | 99,778 |
func (c context) bestMatch(mrs header.MediaRanges, languages header.PrecedenceValues, available offer.Offers, vary []string) (best *offer.Match) {
remaining := c.removeExcludedOffers(mrs, available)
foundCtMatch := false
for i := 1; i <= 2; i++ {
for _, offer := range remaining {
best, foundCtMatch = c.findBestMatch(mrs, languages, offer, vary, exactMatch, equalOrPrefix, "exact")
if best != nil {
return best
}
}
for _, offer := range remaining {
best, foundCtMatch = c.findBestMatch(mrs, languages, offer, vary, nearMatch, equalOrWildcard, "near")
if best != nil {
return best
}
}
if foundCtMatch {
languages = header.WildcardPrecedenceValue
} else {
break
}
}
Debug("%s is not acceptable for %d offers (%d available)\n", c, len(remaining), len(available))
return nil
} | function | go | 99,779 |
public class LeagueUserValidator<TUser> : IUserValidator<TUser> where TUser : ApplicationUser
{
public LeagueUserValidator(IOptions<LeagueUserValidatorOptions> serviceConfig, IdentityErrorDescriber describer)
{
Describer = describer as MultiLanguageIdentityErrorDescriber;
RequiredUsernameLength = serviceConfig.Value.RequiredUsernameLength;
}
public MultiLanguageIdentityErrorDescriber Describer { get; }
public int RequiredUsernameLength { get; set; }
public virtual async Task<IdentityResult> ValidateAsync(UserManager<TUser> manager, TUser user)
{
_ = manager ?? throw new ArgumentNullException(nameof(manager));
_ = user ?? throw new ArgumentNullException(nameof(user));
var errors = new List<IdentityError>();
await ValidateUserName(manager, user, errors);
return errors.Count > 0 ? IdentityResult.Failed(errors.ToArray()) : IdentityResult.Success;
}
private async Task ValidateUserName(UserManager<TUser> manager, TUser user, ICollection<IdentityError> errors)
{
var userName = await manager.GetUserNameAsync(user);
if (userName?.Length < RequiredUsernameLength)
{
errors.Add(Describer.UsernameTooShort());
}
}
} | class | c# | 99,780 |
[IsVisibleInDynamoLibrary(true)]
public static List<string> TransferViewTemplateAndFilter(Document LinkDocument, bool IsIncludeFilters = true)
{
var TemplateNames = new List<string>();
var doc = DocumentManager.Instance.CurrentDBDocument;
var views = new FilteredElementCollector(LinkDocument).OfClass(typeof(View)).Cast<View>().Where(x => x.IsTemplate).ToList();
var ids = new List<ElementId>();
foreach (var view in views) { ids.Add(view.Id); }
TransactionManager.Instance.EnsureInTransaction(doc);
var templates = ElementTransformUtils.CopyElements(LinkDocument, ids, doc, Transform.Identity,
new CopyPasteOptions());
foreach (var i in templates) { TemplateNames.Add(doc.GetElement(i).Name); }
if (!IsIncludeFilters)
{
foreach (ElementId v in templates)
{
View view = doc.GetElement(v) as View;
var filters = view.GetFilters();
foreach (ElementId f in filters)
{
view.RemoveFilter(f);
}
}
}
TransactionManager.Instance.TransactionTaskDone();
return TemplateNames;
} | function | c# | 99,781 |
func secretDataToRelease(clusterName string, secretData []byte) (*Release, error) {
releaseData, err := base64.StdEncoding.DecodeString(string(secretData))
if err != nil {
return nil, err
}
releaseDataUnzipped, err := decompress(releaseData)
if err != nil {
return nil, err
}
var release Release
err = json.Unmarshal(releaseDataUnzipped, &release)
if err != nil {
return nil, err
}
release.Cluster = clusterName
return &release, nil
} | function | go | 99,782 |
public function process($item, $arguments = null, $inheritedScope = null)
{
$rewrite = $this->getRewriteHashLinks();
$origRewriteDefault = static::getRewriteHashLinksDefault();
static::setRewriteHashLinksDefault($rewrite);
SSViewer::$topLevel[] = $item;
$template = $this->chosen;
$cacheFile = TEMP_PATH . DIRECTORY_SEPARATOR . '.cache'
. str_replace(['\\','/',':'], '.', Director::makeRelative(realpath($template)));
$lastEdited = filemtime($template);
if (!file_exists($cacheFile) || filemtime($cacheFile) < $lastEdited) {
$content = file_get_contents($template);
$content = $this->parseTemplateContent($content, $template);
$fh = fopen($cacheFile, 'w');
fwrite($fh, $content);
fclose($fh);
}
$underlay = ['I18NNamespace' => basename($template)];
foreach (['Content', 'Layout'] as $subtemplate) {
$sub = $this->getSubtemplateFor($subtemplate);
if (!$sub) {
continue;
}
$underlay[$subtemplate] = function () use ($item, $arguments, $sub) {
$subtemplateViewer = clone $this;
$subtemplateViewer->includeRequirements(false);
$subtemplateViewer->setTemplate($sub);
if ($subtemplateViewer->exists()) {
return $subtemplateViewer->process($item, $arguments);
}
return null;
};
}
$output = $this->includeGeneratedTemplate($cacheFile, $item, $arguments, $underlay, $inheritedScope);
if ($this->includeRequirements) {
$output = Requirements::includeInHTML($output);
}
array_pop(SSViewer::$topLevel);
if ($rewrite) {
if (strpos($output, '<base') !== false) {
if ($rewrite === 'php') {
$thisURLRelativeToBase = <<<PHP
<?php echo \\SilverStripe\\Core\\Convert::raw2att(preg_replace("/^(\\\\/)+/", "/", \$_SERVER['REQUEST_URI'])); ?>
PHP;
} else {
$thisURLRelativeToBase = Convert::raw2att(preg_replace("/^(\\/)+/", "/", $_SERVER['REQUEST_URI']));
}
$output = preg_replace('/(<a[^>]+href *= *)"#/i', '\\1"' . $thisURLRelativeToBase . '#', $output);
}
}
$html = DBField::create_field('HTMLFragment', $output);
static::setRewriteHashLinksDefault($origRewriteDefault);
return $html;
} | function | php | 99,783 |
def push(self, act, force=False):
if act is not None and isinstance(act, DialogueAct):
if act in self.agenda and not force:
self.remove(act)
self.agenda.append(act)
else:
print("Error! Cannot add item %s in the agenda." % act) | function | python | 99,784 |
def plaquette(self, index):
r, c = index
if not self.code.is_plaquette(index):
raise IndexError('{} is not a plaquette index.'.format(index))
operator = 'Z' if self.code.is_primal(index) else 'X'
self.site(operator, (r - 1, c))
self.site(operator, (r + 1, c))
self.site(operator, (r, c - 1))
self.site(operator, (r, c + 1))
return self | function | python | 99,785 |
func (s *healthCheckCounter) internalWakeWithoutLock() {
if s.waitlist.Len() == 0 {
return
}
s.waitlist.deleteThrough(s.next-1, func(goner *waituntil, through int64) {
goner.ready.Close()
for _, g := range goner.readylist {
g.Close()
}
})
} | function | go | 99,786 |
def _update_by_document(self,
callback: Callable,
collection: Collection,
filter_path: List[str],
update_path: List[str]) -> None:
field_filter_path = copy(filter_path)
if self.field_name:
field_filter_path += [self.field_name]
filter_dotpath = '.'.join(field_filter_path)
if not update_path:
json_path = '$'
else:
json_path = '.'.join(f.replace('$[]', '[*]') for f in update_path)
json_path = json_path.replace('.[*]', '[*]')
parser = jsonpath_rw.parse(json_path)
find_fltr = {}
if not self._include_missed_fields and filter_dotpath:
find_fltr = {filter_dotpath: {'$exists': True}}
if self.document_cls:
find_fltr['_cls'] = self.document_cls
if flags.dry_run:
msg = '* db.%s.find(%s) -> [Loop](%s) -> db.%s.bulk_write(...)'
log.info(msg, collection.name, find_fltr, filter_dotpath, collection.name)
return
bulk_db = flags.database2
bulk_collection = bulk_db[collection.name]
buf = []
for doc in collection.find(find_fltr):
prev_doc = deepcopy(doc)
for embedded_doc in parser.find(doc):
embedded_doc = embedded_doc.value
if self.document_cls:
if embedded_doc is None:
continue
if not isinstance(embedded_doc, dict):
if self.migration_policy.name == 'strict':
raise InconsistencyError(
f"Field {filter_dotpath} has wrong value {embedded_doc!r} "
f"(should be embedded document) in record {doc}"
)
else:
continue
if embedded_doc.get('_cls', self.document_cls) != self.document_cls:
continue
ctx = ByDocContext(collection=collection,
document=embedded_doc,
filter_dotpath=filter_dotpath)
callback(ctx)
if prev_doc != doc:
buf.append(ReplaceOne({'_id': doc['_id']}, doc, upsert=False))
if len(buf) >= flags.BULK_BUFFER_LENGTH:
bulk_collection.bulk_write(buf, ordered=False)
buf.clear()
if buf:
bulk_collection.bulk_write(buf, ordered=False)
buf.clear() | function | python | 99,787 |
async def leaderboards(
self, offset: int = 0, length: int = 100
) -> list[CampaignLeaderboard]:
leaderboards = []
leaderboard_data = get_from_cache(
f"campaign:{self.campaign_id}:{offset}:{length}"
)
if leaderboard_data is not None:
for lb_place in leaderboard_data.get("tops", []):
leaderboards.append(CampaignLeaderboard._from_dict(lb_place))
return leaderboards
api_client = _APIClient()
leaderboard_data = await api_client.get(
_TMIO.build([_TMIO.TABS.LEADERBOARD, self.leaderboard_uid])
+ f"?offset={offset}&length={length}"
)
await api_client.close()
with suppress(KeyError, TypeError):
raise TMIOException(leaderboard_data["error"])
set_in_cache(
f"campaign:{self.campaign_id}:{offset}:{length}",
leaderboard_data,
ex=432000,
)
for lb_place in leaderboard_data.get("tops", []):
leaderboards.append(CampaignLeaderboard._from_dict(lb_place))
return leaderboards | function | python | 99,788 |
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "FilterType")
public class FilterType
extends OriginalFilterType
{
@XmlAttribute(name = "active")
protected Boolean active;
@XmlAttribute(name = "title")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlSchemaType(name = "token")
protected String title;
@XmlAttribute(name = "description")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlSchemaType(name = "token")
protected String description;
@XmlAttribute(name = "id")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlSchemaType(name = "token")
protected String id;
@XmlAttribute(name = "filterType")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlSchemaType(name = "token")
protected String filterType;
@XmlAttribute(name = "match")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlSchemaType(name = "token")
protected String match;
@XmlAttribute(name = "type")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlSchemaType(name = "token")
protected String type;
/**
* Gets the value of the active property.
*
* @return
* possible object is
* {@link Boolean }
*
*/
public boolean isActive() {
return active;
}
/**
* Sets the value of the active property.
*
* @param value
* allowed object is
* {@link Boolean }
*
*/
public void setActive(boolean value) {
this.active = value;
}
public boolean isSetActive() {
return (this.active!= null);
}
public void unsetActive() {
this.active = null;
}
/**
* Gets the value of the title property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getTitle() {
return title;
}
/**
* Sets the value of the title property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setTitle(String value) {
this.title = value;
}
public boolean isSetTitle() {
return (this.title!= null);
}
/**
* Gets the value of the description property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getDescription() {
return description;
}
/**
* Sets the value of the description property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setDescription(String value) {
this.description = value;
}
public boolean isSetDescription() {
return (this.description!= null);
}
/**
* Gets the value of the id property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getId() {
return id;
}
/**
* Sets the value of the id property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setId(String value) {
this.id = value;
}
public boolean isSetId() {
return (this.id!= null);
}
/**
* Gets the value of the filterType property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getFilterType() {
return filterType;
}
/**
* Sets the value of the filterType property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setFilterType(String value) {
this.filterType = value;
}
public boolean isSetFilterType() {
return (this.filterType!= null);
}
/**
* Gets the value of the match property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getMatch() {
return match;
}
/**
* Sets the value of the match property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setMatch(String value) {
this.match = value;
}
public boolean isSetMatch() {
return (this.match!= null);
}
/**
* Gets the value of the type property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getType() {
return type;
}
/**
* Sets the value of the type property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setType(String value) {
this.type = value;
}
public boolean isSetType() {
return (this.type!= null);
}
} | class | java | 99,789 |
int dr_screenshot(const char *filename, int x, int y, int w, int h)
{
#ifdef WIN32
if( dr_screenshot_png(filename, max(w,width-1), h, width, ((short unsigned int *)texture_map)+x+y*width, 16) ) {
return 1;
}
#endif
PALETTE pal;
get_palette(pal);
return save_bitmap(filename, texture_map, pal) == 0 ? 1 : -1;
} | function | c++ | 99,790 |
def sls_2nd_order(model, geometry, p, r=None, **kwargs):
forward = kwargs.get('forward', True)
space_order = p.space_order
save = kwargs.get('save', False)
save_t = geometry.nt if save else None
s = model.grid.stepping_dim.spacing
b = model.b
vp = model.vp
damp = model.damp
qp = model.qp
f0 = geometry._f0
q = kwargs.get('q', 0)
t_s = (sp.sqrt(1.+1./qp**2)-1./qp)/f0
t_ep = 1./(f0**2*t_s)
tt = (t_ep/t_s)-1.
rho = 1. / b
bm = rho * vp**2
r = r or TimeFunction(name="r", grid=model.grid, time_order=2,
space_order=space_order, save=save_t, staggered=NODE)
if forward:
pde_r = r + s * (tt / t_s) * rho * div(b * grad(p, shift=.5), shift=-.5) - \
s * (1. / t_s) * r
u_r = Eq(r.forward, damp * pde_r)
pde_p = 2. * p - damp * p.backward + s**2 * bm * (1. + tt) * \
div(b * grad(p, shift=.5), shift=-.5) - s**2 * vp**2 * \
r.forward + s**2 * vp**2 * q
u_p = Eq(p.forward, damp * pde_p)
return [u_r, u_p]
else:
pde_r = r + s * (tt / t_s) * p - s * (1. / t_s) * r
u_r = Eq(r.backward, damp * pde_r)
pde_p = 2. * p - damp * p.forward + s**2 * vp**2 * \
div(b * grad((1. + tt) * rho * p, shift=.5), shift=-.5) - s**2 * vp**2 * \
div(b * grad(rho * r.backward, shift=.5), shift=-.5)
u_p = Eq(p.backward, damp * pde_p)
return [u_r, u_p] | function | python | 99,791 |
private int readTestResult(final Launcher launcher) throws IOException, InterruptedException
{
int result = 0;
try
{
if (tttBuilder.getCreateResult())
{
VirtualChannel vChannel = launcher.getChannel();
FilePath testFolder = new FilePath(vChannel, tttBuilder.getFolderPath());
boolean usesNewExtension = TotalTestRunnerUtils.usesNewFileExtensions(launcher, listener, remoteFileSeparator);
boolean isSuite = true;
String resultFileName = null;
if (testFolder.exists() && testFolder.isDirectory() == false)
{
String fileName = testFolder.getName();
int idx = fileName.indexOf('.');
if (idx != -1)
{
String extension = fileName.substring(idx + 1);
if (extension.compareTo(FILE_EXT_XASUITE) == 0)
{
isSuite = true;
resultFileName =
String.format("%s.%s", fileName.substring(0, idx),
usesNewExtension ? FILE_EXT_XASUITE_RESULT : FILE_EXT_XASUITE_RESULT_OLD);
}
else if (extension.compareTo(FILE_EXT_XAUNIT) == 0 ||
extension.compareTo(FILE_EXT_XAUNIT_OLD) == 0 ||
extension.compareTo(FILE_EXT_CONTEXT) == 0 ||
extension.compareTo(FILE_EXT_CONTEXT_OLD) == 0)
{
isSuite = false;
resultFileName = String.format("%s.%s", fileName.substring(0, idx),
usesNewExtension ? FILE_EXT_RESULT : FILE_EXT_RESULT_OLD);
}
}
}
else
{
isSuite = true;
resultFileName = usesNewExtension ? GENERATED_SUITE_RESULT_FILE_NAME : GENERATED_SUITE_RESULT_FILE_NAME_OLD;
}
FilePath testSuiteResultPath = getOutputFilePath(launcher, listener, resultFileName);
if (testSuiteResultPath != null)
{
listener.getLogger().println("Found file path: " + testSuiteResultPath.getRemote());
}
else
{
FilePath workDir = new FilePath(vChannel, workspaceFilePath.getRemote());
testSuiteResultPath = new FilePath(workDir, resultFileName).absolutize();
listener.getLogger().println("The file path: " + testSuiteResultPath.getRemote() + " is missing."); //$NON-NLS-2$
}
listener.getLogger().println("TotalTest CLI script file remote path: " + testSuiteResultPath.getRemote());
listener.getLogger().println("Reading suite result from file: " + testSuiteResultPath.getRemote());
String content = null;
if (testSuiteResultPath.isRemote())
{
content = testSuiteResultPath.act(new TotalTestRunnerUtils.GetRemoteUTF8FileContents());
}
else
{
content = new String(Files.readAllBytes(Paths.get(testSuiteResultPath.getRemote())), StandardCharsets.UTF_8);
}
listener.getLogger().println("Result content:");
listener.getLogger().println(content);
Document document = getXaScenarioSuiteResultAsDocument(content);
String xaScenarioSuiteResult = getXaScenarioSuiteResult(document, isSuite);
String logMessage = String.format("Result state from %s: %s", isSuite ? FILE_EXT_XASUITE : FILE_EXT_XAUNIT , xaScenarioSuiteResult);
listener.getLogger().println(logMessage);
if (!xaScenarioSuiteResult.equalsIgnoreCase("SUCCESS"))
{
result = -1;
}
if (isSuite && result != -1 && tttBuilder.getCcThreshold() > 0)
{
listener.getLogger().println(
"The suite executed successfully, now checking that code coverage level is higher than the threshold on "
+ tttBuilder.getCcThreshold() + " %");
boolean isCCThresholdOk = getXaScenarioSuiteCodeCoverage(document, isSuite);
if (!isCCThresholdOk)
{
listener.getLogger().println("Code coverage threshold not reached");
result = -1;
}
}
}
}
catch (Exception e)
{
listener.getLogger().println("Exception in parsing XaSuiteResult. " + e.getMessage());
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
listener.getLogger().println(sw.toString());
}
return result;
} | function | java | 99,792 |
public static void ToBindControl(IGUIField control, params object[] args)
{
UIDescriptor desc = null;
MemberInfo member = null;
foreach (object obj in args)
{
if (obj is UIDescriptor) desc = (UIDescriptor)obj;
if (obj is MemberInfo) member = (MemberInfo)obj;
}
ToBindControl(control, desc, member);
} | function | c# | 99,793 |
function normalizeWikitext(wt, opts) {
if (opts.preDiff) {
wt = wt.replace(/^([*#:;]|\|[-+|]?|!!?)[ \t]*(.*?)[ \t]*$/mg, "$1$1");
wt = wt.replace(/^(=+)[ \t]*([^\n]*?)[ \t]*(=+)[ \t]*$/mg, "$1$2$3");
}
if (opts.newlines) {
wt = wt.replace(/\n*(\n=[^\n]*=$\n)\n*/mg, "$1");
wt = wt.replace(/(^[^*][^\n]*$\n)\n+([*])/mg, "$1$2");
wt = wt.replace(/(^[^#][^\n]*$\n)\n+([#])/mg, "$1$2");
wt = wt.replace(/(^[^:][^\n]*$\n)\n+([:])/mg, "$1$2");
wt = wt.replace(/(^[^;][^\n]*$\n)\n+([;])/mg, "$1$2");
wt = wt.replace(/(^[*][^\n]*$\n)\n+([^*])/mg, "$1$2");
wt = wt.replace(/(^[#][^\n]*$\n)\n+([^#])/mg, "$1$2");
wt = wt.replace(/(^[:][^\n]*$\n)\n+([^:])/mg, "$1$2");
wt = wt.replace(/(^[;][^\n]*$\n)\n+([^;])/mg, "$1$2");
wt = wt.replace(/\n+(\n{\|)/mg, "$1");
wt = wt.replace(/(\|}\n)\n+/mg, "$1");
wt = wt.replace(/^\n+|\n$/, '');
}
if (opts.postDiff) {
wt = wt.replace(/^\t/, ' ');
wt = wt.replace(/\n\t/g, '\n ');
wt = wt.replace(/ +/g, ' ');
wt = wt.replace(/<(\/?)([^ >\/]+)((?:[^>\/]|\/(?!>))*)\/?>/g,
function(match, close, name, remaining) {
return '<' + close + name.toLowerCase() +
remaining.replace(/ $/, '') + '>';
});
wt = wt.replace(/(^|\n|\|(?=\|)|!(?=!))(\{\||\|[\-+]*|!) *([^|\n]*?) *(?=[|\n]|$)/g, '$1$2$3');
wt = wt.replace(/style\s*=\s*"[^"]+"/g, function(match) {
return match.replace(/\s|;(?=")/g, '');
});
wt = wt.replace(/"([^"]*?)"/g, '$1');
wt = wt.replace(/(^|\n)<\/(?:small|center)>(?=\n[|!]|\n?$)/g, '');
wt = wt.replace(/([|!].*?)<\/(?:small|center)>(?=\n[|!]|\n?$)/gi, '$1');
}
return wt;
} | function | javascript | 99,794 |
QExifValue::QExifValue( const QString &value, TextEncoding encoding )
: d( qExifValuePrivateSharedNull() )
{
switch( encoding )
{
case AsciiEncoding:
d = new QExifUndefinedValuePrivate( QByteArray::fromRawData( "ASCII\0\0\0", 8 ) + value.toLatin1() );
break;
case JisEncoding:
{
QTextCodec *codec = QTextCodec::codecForName( "JIS X 0208" );
if( codec )
d = new QExifUndefinedValuePrivate( QByteArray::fromRawData( "JIS\0\0\0\0\0", 8 ) + codec->fromUnicode( value ) );
}
break;
case UnicodeEncoding:
{
QTextCodec *codec = QTextCodec::codecForName( "UTF-16" );
if( codec )
d = new QExifUndefinedValuePrivate( QByteArray::fromRawData( "UNICODE\0", 8 ) + codec->fromUnicode( value ) );
}
break;
case UndefinedEncoding:
d = new QExifUndefinedValuePrivate( QByteArray::fromRawData( "\0\0\0\0\0\0\0\\0", 8 ) + value.toLocal8Bit() );
break;
default:
d = new QExifAsciiValuePrivate( value );
}
} | function | c++ | 99,795 |
public static string Delete(AdoDataConnection database, int outputStreamID)
{
bool createdConnection = false;
try
{
createdConnection = CreateConnection(ref database);
DataTable outputStreamAcronym = database.Connection.RetrieveData(database.AdapterType, database.ParameterizedQueryString("SELECT Acronym FROM OutputStream WHERE ID = {0}", "outputStreamID"), DefaultTimeout, outputStreamID);
database.Connection.ExecuteNonQuery(database.ParameterizedQueryString("DELETE FROM OutputStream WHERE ID = {0}", "outputStreamID"), DefaultTimeout, outputStreamID);
database.Connection.ExecuteNonQuery(database.ParameterizedQueryString("DELETE FROM Measurement WHERE SignalReference LIKE '" + outputStreamAcronym.Rows[0].Field<string>("Acronym") + "!OS-ST%'"), DefaultTimeout);
CommonFunctions.SendCommandToService("ReloadConfig");
return "Output Stream Deleted Successfully";
}
finally
{
if (createdConnection)
database?.Dispose();
}
} | function | c# | 99,796 |
ErrorStatus md_fc_program(md_fc_ControlTypeDef *pProgramPara)
{
ErrorStatus status = ERROR;
uint16_t BCnt;
md_fc_set_pa_pcnt(pProgramPara->BCnt);
md_fc_set_pa_prog_addr(pProgramPara->SAddr);
BCnt = pProgramPara->BCnt;
BCnt >>= 3;
while (BCnt--)
{
if (pProgramPara->SAddr & pProgramPara->SAddrC)
return status;
md_fc_set_pld(*pProgramPara->pU32Buf++);
md_fc_set_phd(*pProgramPara->pU32Buf++);
md_fc_set_fc_cmd(MD_FC_PC_CMD_PROG_EFLASH);
}
status = SUCCESS;
return status;
} | function | c | 99,797 |
def _neumannBCLoss(self, X, Y, N = None):
sigma_xx, sigma_xy, sigma_yy = self.calculate_cauchy_stresses(X)
numEvals = X.shape[0]
MSE_sig_xy = torch.sum((sigma_xy - Y[:,0])**2) / numEvals
MSE_sig_yy = torch.sum((sigma_yy - Y[:,1])**2) / numEvals
loss_BC_Neu = MSE_sig_yy + MSE_sig_xy
return loss_BC_Neu | function | python | 99,798 |
def apply(self, fun=None, axis=0):
from .astfun import _bytecode_decompile_lambda
if axis not in [0,1]:
raise ValueError("margin must be either 0 (cols) or 1 (rows).")
if fun is None:
raise ValueError("No function to apply.")
if isinstance(fun, type(lambda:0)) and fun.__name__ == (lambda:0).__name__:
res = _bytecode_decompile_lambda(fun.__code__)
return H2OFrame._expr(expr=ExprNode("apply",self, 1+(axis==0),*res))
else:
raise ValueError("unimpl: not a lambda") | function | python | 99,799 |