code
stringlengths 0
30.8k
| source
stringclasses 6
values | language
stringclasses 9
values | __index_level_0__
int64 0
100k
|
---|---|---|---|
private String createData(
Person person, Resume resume, List<Internship> internships, List<Project> projects, List<Skill> skills) {
StringBuilder data = new StringBuilder(resume.getName() + "\n");
data.append("=========================\n")
.append("PERSONAL DETAILS\n")
.append("=========================\n\n")
.append(person.toPreview())
.append("\n\n");
data.append("=========================\n")
.append("INTERNSHIPS\n")
.append("=========================\n\n");
internships.forEach(internship -> data.append(internship.toPreview()).append("\n"));
data.append("=========================\n")
.append("PROJECTS\n")
.append("=========================\n\n");
projects.forEach(project -> data.append(project.toPreview()).append("\n"));
data.append("=========================\n")
.append("SKILLS\n")
.append("=========================\n\n");
skills.forEach(skill -> data.append(skill.toPreview()).append("\n"));
return data.toString();
} | function | java | 100 |
/**
 * Toggles play/pause for the most relevant YouTube video:
 * 1. If a Picture-in-Picture host frame exists, toggle it.
 * 2. Otherwise toggle every playing video across audible tabs.
 * 3. If nothing was paused, toggle the largest YouTube frame in the active tab.
 *
 * Fixes: loop variables were previously undeclared (implicit globals, a
 * ReferenceError in strict mode), and `hasVideoToPause` was overwritten on
 * each non-YouTube tab, losing results from earlier tabs in the loop.
 */
async function togglePlayPause() {
    return getPiPHostFrame()
        .then(async result => {
            await executeScript(result.tab, {file: "toggle_play_pause_video.js", allFrames: false, frameId: result.frameId});
            return true;
        })
        .catch(async err => {
            // No PiP frame available: fall back to scanning audible tabs.
            const audibleTabs = await getAudibleTabs();
            let hasVideoToPause = false;
            for (const audibleTab of audibleTabs) {
                if (isYoutubeTab(audibleTab)) {
                    hasVideoToPause = true;
                    await executeScript(audibleTab, {file: "toggle_play_pause_video.js", allFrames: false, frameId: 0});
                } else {
                    const allPlayingFrames = await getPlayingYTBFramesInTab(audibleTab);
                    console.log("Current playing frames", allPlayingFrames);
                    // Accumulate instead of overwriting so earlier tabs still count.
                    hasVideoToPause = hasVideoToPause || allPlayingFrames.length > 0;
                    for (const playingFrame of allPlayingFrames) {
                        await executeScript(playingFrame.tab, {file: "toggle_play_pause_video.js", allFrames: false, frameId: playingFrame.frameId});
                    }
                }
            }
            return hasVideoToPause;
        }).then(async alreadyPauseVideo => {
            console.log("Should play a video", !alreadyPauseVideo);
            if (!alreadyPauseVideo) {
                const largestFrame = await getLargestYTBFrameInActiveTab();
                await executeScript(largestFrame.tab, {file: "toggle_play_pause_video.js", allFrames: false, frameId: largestFrame.frameId});
            }
        });
}
private static boolean compareRptLines(ReportLine compRpt,
ReportLine baseRpt, int decPlaces, boolean limitPrecision) {
boolean errorFound = false;
for (int i = 0; i < compRpt.getRow().length; i++) {
String compStr = compRpt.getRow()[i];
String baseStr = baseRpt.getRow()[i];
if (!BulkSecInfoTest.similarElements(compStr, baseStr, decPlaces, limitPrecision)) {
printErrorMessage(compRpt, baseRpt, i);
errorFound = true;
}
}
if (!errorFound) {
String info = "Account: " + compRpt.getRow()[0] + " Security: " + compRpt.getRow()[1];
System.out.println("Tested and Passed: " + info);
}
return errorFound;
} | function | java | 102 |
// Validates each key of `shapeObj` (a map of key -> rule or lazy rule)
// against the corresponding property of `inputObject`, collecting the
// per-key outcomes into a single RuleResult tree.
function shape(inputObject, shapeObj, options) {
// The caller may pass a raw object or an EnforceContext wrapper.
const obj = EnforceContext.unwrap(inputObject);
const result = new RuleResult(true);
for (const key in shapeObj) {
const current = shapeObj[key];
const value = obj[key];
// Fail-fast mode: once a failure is recorded, skip the remaining keys.
if (shouldFailFast(value, result)) {
break;
}
// Each key runs under a child context carrying the parent object so
// nested rules can see siblings; the fail-fast flag is propagated down.
result.setChild(
key,
runLazyRule(
current,
new EnforceContext({ value, obj, key }).setFailFast(
inputObject.failFast
)
)
);
}
// Unless `loose` is set, any property present on the object but absent
// from the shape definition fails the whole result immediately.
if (!(options || {}).loose) {
for (const key in obj) {
if (!hasOwnProperty(shapeObj, key)) {
return result.setFailed(true);
}
}
}
return result;
} | function | javascript | 103 |
public void removeRequestsFromQueue(
List<Long> requestIdList, Callback<List<RequestRemovedResult>> callback) {
long[] requestIds = new long[requestIdList.size()];
for (int i = 0; i < requestIdList.size(); i++) {
requestIds[i] = requestIdList.get(i).longValue();
}
nativeRemoveRequestsFromQueue(
mNativeOfflinePageBridge, requestIds, new RequestsRemovedCallback(callback));
} | function | java | 104 |
/**
 * Serializes the story with the given id (including its top-level comments)
 * to a JSON string. Returns "{}" when no story with that id exists.
 */
private String viewStory(String id) {
JSONObject jsObj = new JSONObject();
Story story = getStory(id);
if (story != null) {
jsObj.put("id", id);
jsObj.put("title", story.title);
jsObj.put("by", story.by);
jsObj.put("time", story.time);
// url is optional on a story; only emitted when present.
if (story.url != null)
jsObj.put("url", story.url);
if (story.comments != null) {
JSONArray comms = new JSONArray();
Comment[] comments = getComments(story.comments);
for (Comment c : comments) {
// getComments may yield null slots (e.g. comments that failed to load); skip them.
if (c == null) continue;
JSONObject commObj = new JSONObject()
.put("id", c.id)
.put("text", c.text)
.put("by", c.by)
.put("time", c.time);
// Child comments are emitted as raw id arrays, not expanded recursively.
if (c.comments != null) {
commObj.put("comments", new JSONArray(c.comments));
}
comms.put(commObj);
}
jsObj.put("comments", comms);
}
// Audit trail: only successful lookups are logged.
logger.logStoryQueried(id, story.title);
}
return jsObj.toString();
} | function | java | 105 |
private void showDetails(View view, int position) {
if (mDualPane) {
EpisodesActivity activity = (EpisodesActivity) getActivity();
activity.setCurrentPage(position);
setItemChecked(position);
} else {
int episodeId = (int) getListView().getItemIdAtPosition(position);
Intent intent = new Intent();
intent.setClass(getActivity(), EpisodesActivity.class);
intent.putExtra(EpisodesActivity.InitBundle.EPISODE_TVDBID, episodeId);
Utils.startActivityWithAnimation(getActivity(), intent, view);
}
} | function | java | 106 |
// Initializes the Asset Catalog OMAS: reads supported zones and searchable
// types from the access-service options, creates the service instance, and
// registers the OMRS topic listener only when indexing is enabled.
@Override
public void initialize(AccessServiceConfig accessServiceConfigurationProperties,
OMRSTopicConnector enterpriseOMRSTopicConnector,
OMRSRepositoryConnector repositoryConnector,
AuditLog auditLog,
String serverUserName) throws OMAGConfigurationErrorException {
final String actionDescription = "initialize";
auditLog.logMessage(actionDescription, AssetCatalogAuditCode.SERVICE_INITIALIZING.getMessageDefinition());
try {
this.auditLog = auditLog;
List<String> supportedZones = this.extractSupportedZones(accessServiceConfigurationProperties.getAccessServiceOptions(),
accessServiceConfigurationProperties.getAccessServiceName(),
auditLog);
List<String> supportedTypesForSearch = getSupportedTypesForSearchOption(accessServiceConfigurationProperties);
instance = new AssetCatalogServicesInstance(repositoryConnector, supportedZones, auditLog, serverUserName,
accessServiceConfigurationProperties.getAccessServiceName(), supportedTypesForSearch);
boolean indexingEnabled = this.isIndexingEnabled(accessServiceConfigurationProperties);
// Topic listener is optional: only attach when indexing was requested.
if(indexingEnabled) {
registerListener(accessServiceConfigurationProperties,
enterpriseOMRSTopicConnector,
repositoryConnector,
auditLog);
}
this.serverName = instance.getServerName();
auditLog.logMessage(actionDescription, AssetCatalogAuditCode.SERVICE_INITIALIZED.getMessageDefinition(serverName));
} catch (Exception error) {
// NOTE(review): if instance creation failed, serverName may still be
// unset here, so the failure audit message can carry a null server name.
auditLog.logException(actionDescription, AssetCatalogAuditCode.SERVICE_INSTANCE_FAILURE.getMessageDefinition(error.getMessage(), serverName), error);
super.throwUnexpectedInitializationException(actionDescription, AccessServiceDescription.ASSET_CATALOG_OMAS.getAccessServiceFullName(), error);
}
} | function | java | 107 |
public abstract class PrimefacesLazyEntityDataModel<T extends PersistentObject> extends LazyDataModel<T> implements SelectableDataModel<T> {
/** Serialization id. */
private static final long serialVersionUID = 3299810696986238264L;
/** The logger. */
private static final Logger logger = Logger.getLogger(PrimefacesLazyEntityDataModel.class.getCanonicalName());
/** If not used in a CRUD, the controller should provide the DAO that can access the row data. */
private BaseDAO<T> entityDAO;
/**
* Constructor from superclass.
*
* @param entityDAO
* The DAO for objects of the type that populate the data model.
*/
public PrimefacesLazyEntityDataModel(BaseDAO<T> entityDAO) {
this.entityDAO = entityDAO;
}
/** @see org.primefaces.model.LazyDataModel#getRowKey(java.lang.Object) */
@Override
public Object getRowKey(T object) {
logger.log(Level.FINEST, "Obtaining the row key of object \"{0}\" from the data model", object);
return object.getUuid();
}
/** @see org.primefaces.model.LazyDataModel#getRowData(java.lang.String) */
@Override
public T getRowData(String rowKey) {
logger.log(Level.FINEST, "Obtaining the row data for key \"{0}\" from the data model", rowKey);
try {
return entityDAO.retrieveByUuid(rowKey);
}
catch (PersistentObjectNotFoundException e) {
logger.log(Level.WARNING, "Trying to obtain row data from entity with UUID {0} but no entity with that UUID was found");
return null;
}
catch (MultiplePersistentObjectsFoundException e) {
logger.log(Level.WARNING, "Trying to obtain row data from entity with UUID {0} but multiple entities with that UUID were found");
return null;
}
}
} | class | java | 108 |
// Messaging template for Amazon SQS: resolves logical queue names to queue
// URLs, sends via the inherited channel-sending template, and implements the
// receiving operations by delegating to QueueMessageChannel.
public class QueueMessagingTemplate extends AbstractMessageChannelMessagingSendingTemplate<QueueMessageChannel> implements DestinationResolvingMessageReceivingOperations<QueueMessageChannel> {
private final AmazonSQSAsync amazonSqs;
// Convenience constructor: default destination resolution, default converter.
public QueueMessagingTemplate(AmazonSQSAsync amazonSqs) {
this(amazonSqs, (ResourceIdResolver) null, null);
}
public QueueMessagingTemplate(AmazonSQSAsync amazonSqs, ResourceIdResolver resourceIdResolver) {
this(amazonSqs, resourceIdResolver, null);
}
/**
 * Initializes the messaging template by configuring the resource Id resolver as well as the message
 * converter. Uses the {@link DynamicQueueUrlDestinationResolver} with the default configuration to
 * resolve destination names.
 *
 * @param amazonSqs
 *            The {@link AmazonSQS} client, cannot be {@code null}.
 * @param resourceIdResolver
 *            The {@link ResourceIdResolver} to be used for resolving logical queue names.
 * @param messageConverter
 *            A {@link MessageConverter} that is going to be added to the composite converter.
 */
public QueueMessagingTemplate(AmazonSQSAsync amazonSqs, ResourceIdResolver resourceIdResolver, MessageConverter messageConverter) {
this(amazonSqs, new DynamicQueueUrlDestinationResolver(amazonSqs, resourceIdResolver), messageConverter);
}
/**
 * Initializes the messaging template by configuring the destination resolver as well as the message
 * converter. Uses the {@link DynamicQueueUrlDestinationResolver} with the default configuration to
 * resolve destination names.
 *
 * @param amazonSqs
 *            The {@link AmazonSQS} client, cannot be {@code null}.
 * @param destinationResolver
 *            A destination resolver implementation to resolve queue names into queue urls. The
 *            destination resolver will be wrapped into a {@link org.springframework.messaging.core.CachingDestinationResolverProxy}
 *            to avoid duplicate queue url resolutions.
 * @param messageConverter
 *            A {@link MessageConverter} that is going to be added to the composite converter.
 */
public QueueMessagingTemplate(AmazonSQSAsync amazonSqs, DestinationResolver<String> destinationResolver, MessageConverter messageConverter) {
super(destinationResolver);
this.amazonSqs = amazonSqs;
initMessageConverter(messageConverter);
}
// Each physical queue URL gets its own lightweight channel wrapper.
@Override
protected QueueMessageChannel resolveMessageChannel(String physicalResourceIdentifier) {
return new QueueMessageChannel(this.amazonSqs, physicalResourceIdentifier);
}
// Receive from the configured default destination (throws if none is set).
@Override
public Message<?> receive() throws MessagingException {
return receive(getRequiredDefaultDestination());
}
@Override
public Message<?> receive(QueueMessageChannel destination) throws MessagingException {
return destination.receive();
}
@Override
public <T> T receiveAndConvert(Class<T> targetClass) throws MessagingException {
return receiveAndConvert(getRequiredDefaultDestination(), targetClass);
}
// Converts the received payload; returns null when no message was available.
@SuppressWarnings("unchecked")
@Override
public <T> T receiveAndConvert(QueueMessageChannel destination, Class<T> targetClass) throws MessagingException {
Message<?> message = destination.receive();
if (message != null) {
return (T) getMessageConverter().fromMessage(message, targetClass);
} else {
return null;
}
}
// Logical-name variants: resolve the queue name to a channel first.
@Override
public Message<?> receive(String destinationName) throws MessagingException {
return resolveMessageChannelByLogicalName(destinationName).receive();
}
@Override
public <T> T receiveAndConvert(String destinationName, Class<T> targetClass) throws MessagingException {
QueueMessageChannel channel = resolveMessageChannelByLogicalName(destinationName);
return receiveAndConvert(channel, targetClass);
}
} | class | java | 109 |
def replace_text(
    self, doc: OpenDocumentText, tags: dict
) -> OpenDocumentText:
    """Replace ``{{...}}`` template tags inside an ODF text document.

    Scans headings and paragraphs for tag markers and dispatches each
    flagged text node to the matching replacement strategy (UNDER, CHECK,
    numbered split, IF/THEN/ELSE, or plain substitution).

    :param doc: parsed ODF document to mutate in place.
    :param tags: mapping of tag names to replacement values; split into
        the four per-strategy dicts by ``split_tags``.
    :return: the same ``doc`` object, mutated.
    """
    under_tags, check_tags, simple_tags, if_tags = self.split_tags(tags)
    # Only headings (H) and paragraphs (P) can contain template tags.
    elements = doc.getElementsByType(odfText.H) + doc.getElementsByType(
        odfText.P
    )
    # Keep only elements whose flattened text contains a {{...}} marker.
    flagged_elements = [
        i for i in elements if search(r"{{[^{}]+?}}", str(i))
    ]
    flagged_text_elements = self.flatten(
        list(map(self.recursive_get_text, flagged_elements))
    )
    flagged_text_elements = [i for i in flagged_text_elements if i]
    # Map each text node back to its parent element; needed by the
    # underline strategy, which must restyle the enclosing element.
    flagged_parent_text_elements = {}
    for i in flagged_elements:
        for j, k in self.recursive_get_parents(
            i, flagged_text_elements
        ).items():
            flagged_parent_text_elements[j] = k
    for text in flagged_text_elements:
        # Skip nodes whose tag markers were split across sibling nodes.
        if not search(
            self.BEFORE_FLAG + ".*?" + self.AFTER_FLAG, text.data
        ):
            continue
        # NOTE(review): dispatch order matters -- the first matching
        # pattern wins, so e.g. an UNDER tag is never treated as simple.
        if search(
            self.BEFORE_FLAG
            + "UNDER"
            + self.SEPARATOR
            + ".*?"
            + self.AFTER_FLAG,
            text.data,
        ):
            parent_text = flagged_parent_text_elements[text]
            parent_text = self.underline(
                text, parent_text, under_tags, doc
            )
        elif search(
            self.BEFORE_FLAG
            + "CHECK"
            + self.SEPARATOR
            + r".*?"
            + self.AFTER_FLAG,
            text.data,
        ):
            text.data = self.check_boxes(text.data, check_tags)
        elif search(
            self.BEFORE_FLAG
            + r".*?"
            + self.SEPARATOR
            + r"\d+?"
            + self.AFTER_FLAG,
            text.data,
        ):
            # Tag with a numeric suffix: split-style replacement.
            text.data = self.split_replace(text.data, simple_tags)
        elif search(
            self.BEFORE_FLAG
            + r".+?"
            + self.SEPARATOR
            + "THEN"
            + self.SEPARATOR
            + r".*?"
            + self.SEPARATOR
            + "ELSE"
            + self.SEPARATOR
            + r".*?"
            + self.SEPARATOR
            + "END"
            + self.AFTER_FLAG,
            text.data,
        ):
            text.data = self.if_replace(text.data, if_tags)
        else:
            text.data = self.simple_replace(text.data, simple_tags)
    return doc
[Test]
public void Creating_Integeres()
{
    // Exercise variables the student is expected to fill in.
    int example = 0;
    int singleDigit = 6;
    int number = 14;
    int lessThenZero = -100;

    // NUnit's classic Assert.AreEqual takes (expected, actual); the original
    // had the arguments reversed, producing misleading failure messages.
    Assert.AreEqual(0, example, "example has wrong value, did you change it?");

    Assert.AreEqual(6, singleDigit, "singleDigit has wrong value, did you change it?");
    Assert.That(singleDigit, Is.TypeOf<int>(), "You sure singleDigit is int?");

    // Fixed copy-paste bugs: the type checks below previously re-tested
    // singleDigit instead of number and lessThenZero.
    Assert.AreEqual(14, number, "number has wrong value, did you change it?");
    Assert.That(number, Is.TypeOf<int>(), "You sure number is int?");

    Assert.AreEqual(-100, lessThenZero, "lessThenZero has wrong value, did you change it?");
    Assert.That(lessThenZero, Is.TypeOf<int>(), "You sure lessThenZero is int?");
}
def build_action_frame(master, label_text, label_width, command,
                       pre_msg=None):
    """Build a one-row ttk frame with a label and a "Run" button.

    :param master: parent tkinter widget the frame is gridded into.
    :param label_text: text shown in the row's label.
    :param label_width: fixed width for the label column.
    :param command: callback invoked when the button is pressed.
    :param pre_msg: optional message shown before the command runs
        (bound to the button's ButtonPress event).
    :return: the constructed frame; the label and button are attached to it
        as ``row_label`` and ``button`` attributes for later access.
    """
    frame = ttk.Frame(master=master)
    frame.grid(sticky='w')
    frame.rowconfigure(index=0, pad=10, weight=1)
    frame.row_label = ttk.Label(
        master=frame, text=label_text, width=label_width)
    frame.row_label.grid(row=0, column=0, padx=5, sticky="w")
    frame.button = ttk.Button(master=frame, text="Run", command=command)
    frame.button.grid(row=0, column=1, padx=5)
    if pre_msg is not None:
        # add='+' keeps any existing ButtonPress bindings; the pre-message
        # handler fires before the button's own command.
        frame.button.bind(
            sequence="<ButtonPress>", add='+',
            func=partial(ODKToolsGui.textbox_pre_message, message=pre_msg))
    return frame
def encode_links_as_strings(links1, links2):
    """Encode two link sequences as strings sharing one character per link.

    Every distinct link across both inputs is assigned a unique single
    character; each sequence is then rendered as the string of its links'
    characters (useful e.g. for string-similarity comparisons).

    Fix/generalization: the original zipped the union with ascii_lowercase
    only, silently truncating past 26 links and then raising a KeyError.
    The alphabet now covers letters, digits and punctuation (~94 codes),
    and an explicit ValueError is raised when even that is exceeded.

    :param links1: first iterable of hashable link identifiers.
    :param links2: second iterable of hashable link identifiers.
    :return: tuple ``(string1, string2)`` of the encoded sequences.
    :raises ValueError: if there are more distinct links than characters.
    """
    from string import ascii_lowercase, ascii_uppercase, digits, punctuation

    union = set(links1).union(links2)
    # Lowercase letters first, preserving the original behaviour for
    # inputs with at most 26 distinct links.
    alphabet = ascii_lowercase + ascii_uppercase + digits + punctuation
    if len(union) > len(alphabet):
        raise ValueError(
            "cannot encode %d distinct links with only %d single-character codes"
            % (len(union), len(alphabet)))
    mapping = {link: char for link, char in zip(union, alphabet)}
    string1 = ''.join(mapping[link] for link in links1)
    string2 = ''.join(mapping[link] for link in links2)
    return string1, string2
/**
 * Handles the user's numeric menu choice: re-prompts on invalid input,
 * lists a directory's entries, or prints a file's contents (indented, grey).
 *
 * Fixes: the fs callbacks ignored their `err` argument (a failed read would
 * print `undefined`); locals are now properly declared with const.
 */
function selectOption(data) {
    const num = Number(data) - 1;
    const filename = files[num];
    if (!filename) {
        stdout.write(" Enter your choice: ".red);
    } else {
        stdin.pause();
        if (stats[num].isDirectory()) {
            fs.readdir(__dirname + '/' + filename, function(err, entries) {
                if (err) {
                    console.error((" Could not read directory: " + err.message).red);
                    return;
                }
                console.log(("\n " + entries.length + " item(s) in directory:").yellow);
                entries.forEach(function (file) {
                    console.log(" - " + file);
                });
                console.log("");
            });
        } else {
            fs.readFile(__dirname + '/' + filename, 'utf8', function(err, contents) {
                if (err) {
                    console.error((" Could not read file: " + err.message).red);
                    return;
                }
                // Indent every line of the file by one space before printing.
                console.log("\n" + contents.replace(/(.*)/g, ' $1').grey);
            });
        }
    }
}
// Computes the width a tab character must occupy so that the renderers
// affected by the tab land at the tab stop according to its alignment
// (RIGHT / CENTER / ANCHOR), then applies that width to the tab renderer.
// Returns the computed tab width (clamped to the layout box).
private float CalculateTab(Rectangle layoutBox, float curWidth, TabStop tabStop, IList<IRenderer> affectedRenderers
, IRenderer tabRenderer) {
float sumOfAffectedRendererWidths = 0;
foreach (IRenderer renderer in affectedRenderers) {
sumOfAffectedRendererWidths += renderer.GetOccupiedArea().GetBBox().GetWidth();
}
float tabWidth = 0;
switch (tabStop.GetTabAlignment()) {
case TabAlignment.RIGHT: {
// Right-align: the affected content must END at the tab position.
tabWidth = tabStop.GetTabPosition() - curWidth - sumOfAffectedRendererWidths;
break;
}
case TabAlignment.CENTER: {
// Center: the affected content's midpoint sits at the tab position.
tabWidth = tabStop.GetTabPosition() - curWidth - sumOfAffectedRendererWidths / 2;
break;
}
case TabAlignment.ANCHOR: {
// Anchor: align at the first renderer that contains the anchor
// character; renderers before it contribute their full width.
float anchorPosition = -1;
float processedRenderersWidth = 0;
foreach (IRenderer renderer in affectedRenderers) {
anchorPosition = ((TextRenderer)renderer).GetTabAnchorCharacterPosition();
if (-1 != anchorPosition) {
break;
}
else {
processedRenderersWidth += renderer.GetOccupiedArea().GetBBox().GetWidth();
}
}
// No anchor character found anywhere: treat the anchor as position 0.
if (anchorPosition == -1) {
anchorPosition = 0;
}
tabWidth = tabStop.GetTabPosition() - curWidth - anchorPosition - processedRenderersWidth;
break;
}
}
// A tab can never shrink the line: negative widths collapse to zero.
if (tabWidth < 0) {
tabWidth = 0;
}
// Clamp so the tab plus affected content never overflows the layout box.
if (curWidth + tabWidth + sumOfAffectedRendererWidths > layoutBox.GetWidth()) {
tabWidth -= (curWidth + sumOfAffectedRendererWidths + tabWidth) - layoutBox.GetWidth();
}
tabRenderer.SetProperty(Property.WIDTH, UnitValue.CreatePointValue(tabWidth));
tabRenderer.SetProperty(Property.MIN_HEIGHT, UnitValue.CreatePointValue(maxAscent - maxDescent));
return tabWidth;
} | function | c# | 115 |
/**
 * Generalized Davidson eigenvalue solver.
 *
 * Iteratively builds a subspace of at most @c subspace_size vectors,
 * solving the projected eigenproblem via @c d_projected_solver and
 * optionally accelerating with a preconditioner.
 *
 * Fix: the class definition was missing its terminating semicolon.
 */
class Davidson: public EigenSolver
{
public:
  typedef EigenSolver Base;
  typedef Base::SP_matrix SP_matrix;
  typedef Base::SP_solver SP_solver;
  typedef Base::SP_vector SP_vector;
  typedef Preconditioner::SP_preconditioner SP_pc;

  /// @param tol           convergence tolerance
  /// @param maxit         maximum number of iterations
  /// @param subspace_size maximum dimension of the search subspace
  Davidson(const double tol = 1e-6,
           const int maxit = 100,
           const int subspace_size = 20);
  virtual ~Davidson(){}

  /// Set the operators A (and optionally B) defining A x = lambda B x.
  void set_operators(SP_matrix A,
                     SP_matrix B = SP_matrix(0),
                     SP_db db = SP_db(0));

  /// Set the preconditioner and the side on which it is applied.
  void set_preconditioner(SP_pc P,
                          const int side = LinearSolver::LEFT);

private:
  /// Maximum subspace dimension before restart.
  size_t d_subspace_size;
  /// Optional preconditioner.
  SP_pc d_P;
  /// Work matrix holding A - (ritz value) * B.
  MatrixBase::SP_matrix d_A_minus_ritz_times_B;
  /// Solver for the projected (small) eigenproblem.
  SP_solver d_projected_solver;

  void solve_impl(Vector &x, Vector &x0);
};
/// <summary>
/// Builds a 200 (OK) response for <paramref name="request"/> carrying
/// <paramref name="content"/> as UTF-8 plain text.
/// </summary>
/// <param name="request">The incoming function request.</param>
/// <param name="content">The body text to write into the response.</param>
/// <returns>The populated response, ready to be returned to the caller.</returns>
public static async Task<HttpResponseData> OkResponse(this HttpRequestData request, string content)
{
    HttpResponseData response = request.CreateResponse(HttpStatusCode.OK);
    const string contentType = "text/plain; charset=utf-8";
    response.Headers.Add("Content-Type", contentType);
    await response.WriteStringAsync(content).ConfigureAwait(false);
    return response;
}
def try_get_authorized_client(
        request_params, session, user_info):
    """Return an authorized Twitter client for *user_info*, or ``None``.

    Registers any access token carried in the request, then verifies the
    stored credentials by fetching the current actor.

    Fix: the original had two byte-identical ``except urllib.error.HTTPError``
    clauses; the second was unreachable dead code and has been removed.

    :param request_params: incoming request parameters (may carry a token).
    :param session: current session, used when registering the token.
    :param user_info: holds oauth_access_token / oauth_access_token_secret.
    :return: a working client, or ``None`` when unauthorized.
    :raises urllib.error.HTTPError: for HTTP failures other than 401.
    """
    register_acces_token_if_present(
        request_params, session, user_info)
    if not user_info.oauth_access_token \
            or not user_info.oauth_access_token_secret:
        return None
    try:
        client = build_twitter_client(user_info)
        if client and client.get_actor() is not None:
            return client
    except urllib.error.HTTPError as e:
        # 401 means the stored token was revoked or expired: treat it as
        # "not authorized" rather than an error; anything else propagates.
        if e.code != 401:
            raise
    return None
// FilesByGID returns a channel that streams every File released by the given
// group for the given episode, in FID order. The channel is closed once all
// files have been delivered.
func (adb *AniDB) FilesByGID(ep *Episode, gid GID) <-chan *File {
	ch := make(chan *File, 10)
	fidChan := adb.FIDsByGID(ep, gid)
	go func() {
		// Kick off every FileByID lookup first so they resolve concurrently,
		// then drain the per-file channels in order to preserve FID ordering.
		chs := []<-chan *File{}
		for fid := range fidChan {
			chs = append(chs, adb.FileByID(fid))
		}
		for _, c := range chs {
			for f := range c {
				ch <- f
			}
		}
		close(ch)
	}()
	return ch
} | function | go | 119 |
// GenerateCertificate obtains a CA certificate for the given pseudonym by
// proving knowledge of the underlying secret via a two-round Schnorr proof
// over an elliptic curve: send proof random data, receive the challenge,
// send the proof data, and finally receive the blinded certificate.
func (c *PseudonymsysCAClientEC) GenerateCertificate(userSecret *big.Int, nym *pseudonymsys.PseudonymEC) (
	*pseudonymsys.CACertificateEC, error) {
	if err := c.openStream(c.grpcClient, "GenerateCertificate_EC"); err != nil {
		return nil, err
	}
	defer c.closeStream()
	// Round 1: commit to randomness for the Schnorr proof on (A, B).
	x := c.prover.GetProofRandomData(userSecret, nym.A)
	pRandomData := pb.SchnorrECProofRandomData{
		X: pb.ToPbECGroupElement(x),
		A: pb.ToPbECGroupElement(nym.A),
		B: pb.ToPbECGroupElement(nym.B),
	}
	initMsg := &pb.Message{
		ClientId: c.id,
		Content: &pb.Message_SchnorrEcProofRandomData{
			&pRandomData,
		},
	}
	resp, err := c.getResponseTo(initMsg)
	if err != nil {
		return nil, err
	}
	// The CA answers with the proof challenge.
	ch := resp.GetBigint()
	challenge := new(big.Int).SetBytes(ch.X1)
	// Round 2: answer the challenge with the proof data z.
	z := c.prover.GetProofData(challenge)
	msg := &pb.Message{
		Content: &pb.Message_SchnorrProofData{
			&pb.SchnorrProofData{
				Z: z.Bytes(),
			},
		},
	}
	resp, err = c.getResponseTo(msg)
	if err != nil {
		return nil, err
	}
	// On success the CA returns the blinded pseudonym plus the (r, s)
	// signature forming the certificate.
	cert := resp.GetPseudonymsysCaCertificateEc()
	certificate := pseudonymsys.NewCACertificateEC(
		cert.BlindedA.GetNativeType(),
		cert.BlindedB.GetNativeType(),
		new(big.Int).SetBytes(cert.R), new(big.Int).SetBytes(cert.S))
	if err := c.genericClient.CloseSend(); err != nil {
		return nil, err
	}
	return certificate, nil
} | function | go | 120 |
// Per-locus map step of the phasing walker: queues phasable variants for
// later phasing, passes non-phasable ones straight through, and returns the
// stats plus any variants whose phasing completed at this locus.
public PhasingStatsAndOutput map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
if (tracker == null)
return null;
mostDownstreamLocusReached = ref.getLocus();
if (DEBUG) logger.debug("map() at: " + mostDownstreamLocusReached);
PhasingStats phaseStats = new PhasingStats();
List<VariantContext> unprocessedList = new LinkedList<VariantContext>();
for (VariantContext vc : tracker.getValues(variantCollection.variants, context.getLocation())) {
// Optionally restrict each variant to the requested sample subset.
if (samplesToPhase != null) vc = reduceVCToSamples(vc, samplesToPhase);
if (ReadBackedPhasing.processVariantInPhasing(vc)) {
VariantAndReads vr = new VariantAndReads(vc, context);
unphasedSiteQueue.add(vr);
if (DEBUG)
logger.debug("Added variant to queue = " + VariantContextUtils.getLocation(getToolkit().getGenomeLocParser(), vr.variant));
}
else {
// Not phasable here: emit unchanged alongside the phased output.
unprocessedList.add(vc);
if (DEBUG)
logger.debug("Unprocessed variant = " + VariantContextUtils.getLocation(getToolkit().getGenomeLocParser(), vc));
}
int numReads = context.getBasePileup().getNumberOfElements();
PhasingStats addInPhaseStats = new PhasingStats(numReads, 1);
phaseStats.addIn(addInPhaseStats);
}
// Flush whatever the queue can phase so far (non-final pass).
List<VariantContext> completedList = processQueue(phaseStats, false);
completedList.addAll(unprocessedList);
return new PhasingStatsAndOutput(phaseStats, completedList);
} | function | java | 121 |
/**
 * Advances the traffic-light timer by one tick.
 *
 * @return {@code true} only when the light transitions out of AMBER;
 *         {@code false} for every other tick (including GREEN->next).
 */
public boolean timeGoesBy() {
counter++;
switch (color) {
case GREEN:
if (counter > MAX_GREEN) {
color = color.next();
counter = 0;
}
break;
case AMBER:
// A blinking light suppresses the AMBER timeout.
// NOTE(review): counter is NOT reset here, unlike the GREEN branch --
// confirm whether the next phase deliberately inherits the running count.
if (!blink.isActive() && counter > MAX_AMBER) {
color = color.next();
return true;
}
// NOTE(review): no default branch; other colors only accumulate the
// counter -- presumably their transitions are handled elsewhere.
}
return false;
} | function | java | 122 |
def drop(self, columns, inplace=False):
    """Drop one or more columns, either in place or returning a new Manatee.

    Fixes: an unsupported ``columns`` type previously fell through to an
    UnboundLocalError on ``df``; it now raises a clear TypeError. Also
    replaced ``super(self.__class__, self)`` -- which recurses infinitely
    under further subclassing -- with an explicit ``super(Manatee, self)``.

    :param columns: a column name, or a list/tuple of column names.
    :param inplace: if True, mutate ``self`` via re-init; else return a
        new ``Manatee`` with the columns removed.
    :return: a new ``Manatee`` when ``inplace`` is False, else ``None``.
    :raises TypeError: if ``columns`` is not a str, list, or tuple.
    """
    if isinstance(columns, str):
        df = super(Manatee, self).drop(columns)
    elif isinstance(columns, (list, tuple)):
        # Drop iteratively; each call returns a Manatee, so chaining works.
        df = self
        for column in columns:
            df = df.drop(column)
    else:
        raise TypeError(
            "columns must be a str, list, or tuple, got %r"
            % type(columns).__name__)
    if inplace:
        self.__init__(df)
    else:
        return Manatee(df)
def processPackages(api:to_api.API) -> bool:
    """Fetch this server's package list from Traffic Ops and install each one.

    :param api: authenticated Traffic Ops API client.
    :return: ``True`` on success (or best-effort success in BADASS mode),
        ``False`` when fetching or installing failed.
    """
    from .configuration import Modes, MODE
    try:
        myPackages = api.getMyPackages()
    except (ConnectionError, PermissionError) as e:
        logging.error("Failed to fetch package list from Traffic Ops - %s", e)
        logging.debug("%s", e, exc_info=True, stack_info=True)
        return False
    except ValueError as e:
        logging.error("Got malformed response from Traffic Ops! - %s", e)
        logging.debug("%s", e, exc_info=True, stack_info=True)
        return False
    for package in myPackages:
        # NOTE(review): install() appears to return truthy on FAILURE (the
        # failure log sits inside this branch) -- confirm against its API.
        if package.install():
            if MODE is not Modes.BADASS:
                return False
            # BADASS mode: log the failure and keep installing the rest.
            logging.warning("Failed to install %s, but we're BADASS, so moving on!", package)
    return True
public void addFilter(String name, String... filter) {
if (filter.length < 1) {
throw new IllegalArgumentException();
}
ArrayList<String> parts = new ArrayList<String>();
parts.add(name);
Collections.addAll(parts, filter);
filters.add(parts.toArray(new String[parts.size()]));
} | function | java | 125 |
def _monte_carlo_trajectory_sampler(
    time_horizon: int = None,
    env: DynamicalSystem = None,
    policy: BasePolicy = None,
    state: np.ndarray = None,
):
    """Build a generator that samples state trajectories under *policy*.

    Each invocation of the returned generator resets *env* to *state*,
    rolls the policy forward for *time_horizon* steps, and yields the full
    state sequence (initial state plus one state per step).

    :param time_horizon: number of steps to simulate per trajectory.
    :param env: system to simulate; mutated (its ``state`` is overwritten).
    :param policy: maps (time, state) to an action.
    :param state: initial state for every sampled trajectory.
    :return: the decorated generator function.
    """
    @sample_generator
    def _sample_generator():
        state_sequence = []
        state_sequence.append(state)
        env.state = state
        time = 0
        for t in range(time_horizon):
            action = policy(time=time, state=env.state)
            # cost and done are intentionally unused: only states are sampled.
            next_state, cost, done, _ = env.step(time=t, action=action)
            state_sequence.append(next_state)
            time += 1
        yield state_sequence
    return _sample_generator
class DpdkConfig:
    """Represent DPDK command-line options.

    Fixes: ``--base-virt-addr`` corrected to the real DPDK EAL option name
    ``--base-virtaddr``, and ``extra_opt`` is now separated from the
    preceding options by a space.

    Attributes:
      cores: List of cores to run on.
      mem_channels: Number of memory channels to use.
      drivers: Load external drivers. Can be a single shared object file, or
        a directory containing multiple driver shared objects.
      mem_alloc: Amount of memory to preallocate at startup.
      mem_ranks: Set number of memory ranks.
      xen_dom0: Support application running on Xen Domain0 without hugetlbfs.
      syslog: Set syslog facility.
      socket_mem: Preallocate specified amounts of memory per socket.
      huge_dir: Use specified hugetlbfs directory instead of autodetected
        ones. This can be a sub-directory within a hugetlbfs mountpoint.
      proc_type: Set the type of the current process. (`primary`,
        `secondary`, or `auto`)
      file_prefix: Use a different shared data file prefix for a DPDK
        process. This option allows running multiple independent DPDK
        primary/secondary processes under different prefixes.
      pci_block_list: Skip probing specified PCI device to prevent EAL from
        using it.
      pci_allow_list: Add PCI devices in to the list of devices to probe.
      vdev: Add a virtual device using the format:
        `<driver><id>[,key=val, ...]`.
      vmware_tsc_map: Use VMware TSC map instead of native RDTSC.
      base_virtaddr: Attempt to use a different starting address for all
        memory maps of the primary DPDK process. This can be helpful if
        secondary processes cannot start due to conflicts in address map.
      vfio_intr: Use specified interrupt mode for devices bound to VFIO
        kernel driver. (`legacy`, `msi`, or `msix`)
      create_uio_dev: Create `/dev/uioX` files for devices bound to igb_uio
        kernel driver (usually done by the igb_uio driver itself).
      extra_opt: Extra command-line options.

    Examples:
      Obtaining the DPDK configuration in command-line option format:
      >>> dpdk_config = DpdkConfig([0, 2], 4, pci_allow_list='05:00.0')
      >>> str(dpdk_config)
      '-l 0,2 -n 4 -a 05:00.0'
    """

    def __init__(self, cores: Iterable[int], mem_channels: int,
                 drivers: Optional[Iterable[str]] = None,
                 mem_alloc: Optional[int] = None,
                 mem_ranks: Optional[int] = None, xen_dom0: bool = False,
                 syslog: bool = False,
                 socket_mem: Optional[Iterable[int]] = None,
                 huge_dir: Optional[str] = None,
                 proc_type: Optional[str] = None,
                 file_prefix: Optional[str] = None,
                 pci_block_list: Optional[Iterable[str]] = None,
                 pci_allow_list: Optional[Iterable[str]] = None,
                 vdev: Optional[str] = None, vmware_tsc_map: bool = False,
                 base_virtaddr: Optional[str] = None,
                 vfio_intr: Optional[str] = None, create_uio_dev: bool = False,
                 extra_opt: Optional[str] = None) -> None:
        self.cores = cores
        self.mem_channels = mem_channels
        self.drivers = drivers
        self.mem_alloc = mem_alloc
        self.mem_ranks = mem_ranks
        self.xen_dom0 = xen_dom0
        self.syslog = syslog
        self.socket_mem = socket_mem
        self.huge_dir = huge_dir
        self.proc_type = proc_type
        self.file_prefix = file_prefix
        self.pci_block_list = pci_block_list
        self.pci_allow_list = pci_allow_list
        self.vdev = vdev
        self.vmware_tsc_map = vmware_tsc_map
        self.base_virtaddr = base_virtaddr
        self.vfio_intr = vfio_intr
        self.create_uio_dev = create_uio_dev
        self.extra_opt = extra_opt
        # Normalize scalar values into single-element lists so __str__ can
        # always iterate.
        if drivers is not None and not isinstance(drivers, list):
            self.drivers = [self.drivers]
        if pci_allow_list is not None and not isinstance(pci_allow_list, list):
            self.pci_allow_list = [self.pci_allow_list]
        if pci_block_list is not None and not isinstance(pci_block_list, list):
            self.pci_block_list = [self.pci_block_list]

    def __str__(self) -> str:
        """Render the configuration as a DPDK EAL command-line string."""
        opts = '-l ' + ','.join(str(c) for c in self.cores)
        opts += f' -n {self.mem_channels}'
        if self.drivers is not None:
            for driver in self.drivers:
                opts += f' -d {driver}'
        if self.mem_alloc is not None:
            opts += f' -m {self.mem_alloc}'
        if self.mem_ranks is not None:
            opts += f' -r {self.mem_ranks}'
        if self.xen_dom0:
            opts += ' --xen-dom0'
        if self.syslog:
            # NOTE(review): DPDK's --syslog takes a facility argument; this
            # class models it as a bare flag -- confirm against the target
            # DPDK version.
            opts += ' --syslog'
        if self.socket_mem is not None:
            opt = ','.join(str(sm) for sm in self.socket_mem)
            opts += f' --socket-mem {opt}'
        if self.huge_dir is not None:
            opts += f' --huge-dir {self.huge_dir}'
        if self.proc_type is not None:
            opts += f' --proc-type {self.proc_type}'
        if self.file_prefix is not None:
            opts += f' --file-prefix {self.file_prefix}'
        if self.pci_block_list is not None:
            for pci_block_list in self.pci_block_list:
                opts += f' -b {pci_block_list}'
        if self.pci_allow_list is not None:
            for pci_allow_list in self.pci_allow_list:
                opts += f' -a {pci_allow_list}'
        if self.vdev is not None:
            opts += f' --vdev {self.vdev}'
        if self.vmware_tsc_map:
            opts += ' --vmware-tsc-map'
        if self.base_virtaddr is not None:
            # Fixed: the EAL option is --base-virtaddr, not --base-virt-addr.
            opts += f' --base-virtaddr {self.base_virtaddr}'
        if self.vfio_intr is not None:
            opts += f' --vfio-intr {self.vfio_intr}'
        if self.create_uio_dev:
            opts += ' --create-uio-dev'
        if self.extra_opt is not None:
            # Fixed: previously appended with no separating space.
            opts += f' {self.extra_opt}'
        return opts
// Builds the RestOperations for a resource: constructs a default instance,
// then lets the resource class override it by declaring its own
// createRestOperations factory method (looked up via the bean store); the
// default is used when no such method exists.
protected RestOperations createRestOperations(Object resource, ContextProperties properties, BeanStore beanStore) throws Exception {
RestOperations x = createRestOperationsBuilder(resource, properties, beanStore).build();
x = BeanStore
.of(beanStore, resource)
.addBean(RestOperations.class, x)
.beanCreateMethodFinder(RestOperations.class, resource)
.find("createRestOperations")
.withDefault(x)
.run();
return x;
} | function | java | 128 |
/*
 * pidfd_create - allocate a pidfd (anonymous inode fd) for @pid.
 * @pid: the struct pid the new fd refers to; a reference is taken.
 * @flags: only O_NONBLOCK, O_RDWR and O_CLOEXEC are permitted.
 *
 * Returns the new fd on success, or a negative errno (-EINVAL for a
 * missing/non-thread-group-leader pid or bad flags).
 */
int pidfd_create(struct pid *pid, unsigned int flags)
{
	int fd;

	/* Only thread-group leaders may be referenced by a pidfd. */
	if (!pid || !pid_has_task(pid, PIDTYPE_TGID))
		return -EINVAL;

	if (flags & ~(O_NONBLOCK | O_RDWR | O_CLOEXEC))
		return -EINVAL;

	/* get_pid() pins @pid; the fd's release drops that reference. */
	fd = anon_inode_getfd("[pidfd]", &pidfd_fops, get_pid(pid),
			      flags | O_RDWR | O_CLOEXEC);
	/* On failure the fd never took ownership, so drop the ref here. */
	if (fd < 0)
		put_pid(pid);

	return fd;
} | function | c | 129 |
/// <summary>
/// Routes a single untyped observable stream into per-type observables:
/// each input object is forwarded to every registered output whose type the
/// object is assignable to (class hierarchy plus implemented interfaces).
/// Fix: <see cref="OnError"/> now snapshots the outputs with ToArray(),
/// matching <see cref="OnCompleted"/>, so a handler that registers a new
/// output mid-notification cannot invalidate the iteration.
/// </summary>
public class Demultiplexor : IObserver<object>
{
    // Output subject per registered output type.
    private readonly Dictionary<Type, IObserver<object>> _outputs = new Dictionary<Type, IObserver<object>>();
    // Cache: input type -> the output types it should be delivered to.
    private readonly Dictionary<Type, List<Type>> _knownOutputMappings = new Dictionary<Type, List<Type>>();

    public void OnCompleted()
    {
        // Snapshot so completion handlers may register/unregister outputs.
        foreach (var output in _outputs.Values.ToArray())
        {
            output.OnCompleted();
        }
    }

    public void OnError(Exception error)
    {
        // Snapshot for the same reason as OnCompleted (previously iterated
        // the live collection, risking InvalidOperationException).
        foreach (var output in _outputs.Values.ToArray())
        {
            output.OnError(error);
        }
    }

    public void OnNext(object inputObject)
    {
        var inputObjectType = inputObject.GetType();
        List<Type> outputKeys;
        _knownOutputMappings.TryGetValue(inputObjectType, out outputKeys);
        if (outputKeys == null)
        {
            // First time this runtime type is seen: compute and cache the
            // set of registered outputs it maps onto.
            outputKeys = new List<Type>();
            _knownOutputMappings.Add(inputObjectType, outputKeys);
            outputKeys.AddRange(GetTypes(inputObjectType).Where(type => _outputs.ContainsKey(type)));
        }
        foreach (var keyType in outputKeys)
        {
            _outputs[keyType].OnNext(inputObject);
        }
    }

    /// <summary>
    /// Returns (creating on first use) the typed output stream for
    /// <typeparamref name="TOutput"/>.
    /// </summary>
    public IObservable<TOutput> GetObservable<TOutput>()
    {
        IObserver<object> o;
        if (!_outputs.TryGetValue(typeof(TOutput), out o))
        {
            o = new OutputSubject<TOutput>();
            _outputs.Add(typeof(TOutput), o);
            // Existing cached input mappings must learn about the new output.
            RefreshKnownOutputMappings(typeof(TOutput));
        }
        var output = (IObservable<TOutput>)o;
        return output;
    }

    // All types an instance of inputType can be delivered as: the class
    // chain up to (excluding) object, plus all implemented interfaces.
    private static List<Type> GetTypes(Type inputType)
    {
        var typeList = new List<Type>();
        var temp = inputType;
        while (temp != typeof(object))
        {
            typeList.Add(temp);
            temp = temp.GetTypeInfo().BaseType;
        }
        typeList.AddRange(inputType.GetTypeInfo().ImplementedInterfaces);
        return typeList;
    }

    // Adds the newly registered output type to every cached input mapping
    // whose input type is assignable to it.
    private void RefreshKnownOutputMappings(Type outputType)
    {
        foreach (var knownMappings in _knownOutputMappings)
        {
            if (GetTypes(knownMappings.Key).Contains(outputType) && !knownMappings.Value.Contains(outputType))
            {
                knownMappings.Value.Add(outputType);
            }
        }
    }

    // Typed subject bridging the untyped router to IObservable<T>.
    // NOTE(review): _refcount is decremented on Dispose but never used to
    // tear the subject down -- confirm whether cleanup was intended.
    private sealed class OutputSubject<T> : ISubject<object, T>, IDisposable
    {
        private readonly Subject<T> _subject;
        private int _refcount;

        public OutputSubject()
        {
            _subject = new Subject<T>();
        }

        public void Dispose()
        {
            _refcount--;
        }

        public void OnCompleted()
        {
            _subject.OnCompleted();
        }

        public void OnError(Exception error)
        {
            _subject.OnError(error);
        }

        public void OnNext(object value)
        {
            _subject.OnNext((T)value);
        }

        public IDisposable Subscribe(IObserver<T> observer)
        {
            IDisposable subscription = _subject.Subscribe(observer);
            _refcount++;
            return new CompositeDisposable(subscription, this);
        }
    }
}
/// <summary>
/// Detects changes between a classified source image and a reference
/// classification, writing one result band per tracked category.
/// </summary>
/// <remarks>
/// Fix: the field previously named <c>_differencialChangeDetection</c> was
/// misspelled; it is renamed to <c>_differentialChangeDetection</c>
/// (private, so no external interface changes).
/// </remarks>
[OperationMethodImplementation("AEGIS::253502", "Classified image change detection")]
public sealed class ClassifiedImageChangeDetection : ClassificationChangeDetection<ISpectralGeometry>
{
    #region Private fields
    // When true, unchanged pixels are not written to the result.
    private readonly Boolean _differentialChangeDetection;
    // When true, changes are recorded against the reference (lost) category;
    // otherwise against the new (gained) category.
    private readonly Boolean _lossDetection;
    // Categories being tracked; one result band is allocated per entry.
    private readonly List<Int32> _categoryIndices;
    // Maps a category index to its band in the result raster.
    private readonly Dictionary<Int32, Int32> _categoryBandAssociation;
    private readonly Int32 _numberOfCategories;
    #endregion
    #region Constructor
    /// <summary>
    /// Initializes a new instance from the source geometry and parameters.
    /// Tracked categories come from a single-index parameter, an index-array
    /// parameter, or (by default) every category present in the source.
    /// </summary>
    public ClassifiedImageChangeDetection(ISpectralGeometry input, IDictionary<OperationParameter, Object> parameters)
        : base(input, SpectralOperationMethods.ClassifiedImageChangeDetection, parameters)
    {
        _differentialChangeDetection = ResolveParameter<Boolean>(SpectralOperationParameters.DifferentialChangeDetection);
        _lossDetection = ResolveParameter<Boolean>(SpectralOperationParameters.LossDetection);
        _categoryIndices = new List<Int32>();
        if (IsProvidedParameter(SpectralOperationParameters.ChangeDetectionCategoryIndex))
        {
            Int32 categoryIndex = ResolveParameter<Int32>(SpectralOperationParameters.ChangeDetectionCategoryIndex);
            _categoryIndices.Add(categoryIndex);
        }
        else if (IsProvidedParameter(SpectralOperationParameters.ChangeDetectionCategoryIndices))
        {
            Int32[] indices = ResolveParameter<Int32[]>(SpectralOperationParameters.ChangeDetectionCategoryIndices);
            _categoryIndices.AddRange(indices);
        }
        else
        {
            // No explicit selection: track every category in the source.
            _numberOfCategories = GetNumberOfCategories(Source.Raster);
            for (Int32 i = 0; i < _numberOfCategories; i++)
                _categoryIndices.Add(i);
        }
        // Assign result bands in the order the categories were listed.
        Int32 bandIndex = 0;
        _categoryBandAssociation = _categoryIndices.ToDictionary(x => x, x => bandIndex++);
    }
    #endregion
    #region Protected Operation methods
    /// <summary>
    /// Allocates the result raster: one band per tracked category, with a
    /// radiometric resolution wide enough to hold every category value.
    /// </summary>
    protected override ISpectralGeometry PrepareResult()
    {
        IRasterFactory factory = Source.Factory.GetFactory<ISpectralGeometryFactory>()
                                               .GetFactory<IRasterFactory>();
        Int32 radiometricResolution = _numberOfCategories > 65535 ? 32 : (_numberOfCategories > 255 ? 16 : 8);
        IRaster raster = factory.CreateRaster(_categoryIndices.Count, Source.Raster.NumberOfRows,
                                              Source.Raster.NumberOfColumns, radiometricResolution,
                                              Source.Raster.Mapper);
        return Source.Factory.CreateSpectralGeometry(Source, raster, Source.Presentation, Source.Imaging);
    }
    #endregion
    #region Protected ClassificationChangeDetection methods
    /// <summary>
    /// Records the change state of a single pixel given its current and
    /// reference category values.
    /// </summary>
    protected override void ComputeChange(Int32 rowIndex, Int32 columnIndex, UInt32 value, UInt32 referenceValue)
    {
        if (value == referenceValue)
        {
            // Unchanged pixel: only written in non-differential mode.
            if (!_differentialChangeDetection && _categoryBandAssociation.ContainsKey((Int32) value))
                Result.Raster.SetValue(rowIndex, columnIndex, _categoryBandAssociation[(Int32) value], value);
            return;
        }
        if (!_lossDetection)
        {
            // Gain view: record against the band of the new category.
            if (_categoryBandAssociation.ContainsKey((Int32)value))
                Result.Raster.SetValue(rowIndex, columnIndex, _categoryBandAssociation[(Int32)value], referenceValue);
            if (!_differentialChangeDetection && _categoryBandAssociation.ContainsKey((Int32)referenceValue))
                Result.Raster.SetValue(rowIndex, columnIndex, _categoryBandAssociation[(Int32)referenceValue], referenceValue);
        }
        else
        {
            // Loss view: record against the band of the reference category.
            if (_categoryBandAssociation.ContainsKey((Int32)referenceValue))
                Result.Raster.SetValue(rowIndex, columnIndex, _categoryBandAssociation[(Int32)referenceValue], value);
            if (!_differentialChangeDetection && _categoryBandAssociation.ContainsKey((Int32)value))
                Result.Raster.SetValue(rowIndex, columnIndex, _categoryBandAssociation[(Int32)value], value);
        }
    }
    #endregion
}
func (e *Event) MarshalJSON() ([]byte, error) {
type EventAlias Event
switch e.EventType {
case ROT.String(), DRT.String():
if e.RemoveWitness == nil {
e.RemoveWitness = []string{}
}
if e.AddWitness == nil {
e.AddWitness = []string{}
}
if e.Seals == nil {
e.Seals = SealArray{}
}
return json.Marshal(&struct {
*EventAlias
RemoveWitness []string `json:"wr"`
AddWitness []string `json:"wa"`
Seals SealArray `json:"a"`
}{
EventAlias: (*EventAlias)(e),
RemoveWitness: e.RemoveWitness,
AddWitness: e.AddWitness,
Seals: e.Seals,
})
case IXN.String():
if e.Seals == nil {
e.Seals = SealArray{}
}
return json.Marshal(&struct {
*EventAlias
Seals SealArray `json:"a"`
}{
EventAlias: (*EventAlias)(e),
Seals: e.Seals,
})
case ICP.String():
if e.Config == nil {
e.Config = []prefix.Trait{}
}
if e.Witnesses == nil {
e.Witnesses = []string{}
}
return json.Marshal(&struct {
*EventAlias
Witnesses []string `json:"w"`
Config []prefix.Trait `json:"c"`
}{
EventAlias: (*EventAlias)(e),
Witnesses: e.Witnesses,
Config: e.Config,
})
case VRC.String():
if e.Seals == nil {
return nil, errors.New("unable to serialize a receipt without a seal")
}
return json.Marshal(&struct {
Version string `json:"v"`
Prefix string `json:"i,omitempty"`
Sequence string `json:"s"`
EventType string `json:"t"`
Digest string `json:"d,omitempty"`
Data *Seal `json:"a"`
}{
Version: e.Version,
Prefix: e.Prefix,
Sequence: e.Sequence,
EventType: e.EventType,
Digest: e.EventDigest,
Data: e.Seals[0],
})
}
return json.Marshal(&struct {
*EventAlias
}{
EventAlias: (*EventAlias)(e),
})
} | function | go | 132 |
/// <summary>
/// Registers this instance's handlers with the singleton
/// <c>TransferUnitReceiver</c> for transfer units, text messages,
/// remote exceptions and API-call-list queries.
/// </summary>
private void attachCallbacks()
{
    var receiver = TransferUnitReceiver.instance;
    receiver.addTransferUnitReceiver(new TransferUnitReceiver.ReceiveTransferUnit(receiveTransferUnit));
    receiver.addTextMessageReceiver(new TransferUnitReceiver.ReceiveTextMessage(receiveTextMessage));
    receiver.addExceptionReceiver(new TransferUnitReceiver.ReceiveRemoteHookingException(receiveException));
    receiver.addGetApiCallsToInterceptReceiver(new TransferUnitReceiver.ReceiveGetApiCallsToIntercept(getListOfInterceptedApiCalls));
}
/**
 * Verifies that a node restarted after a timeout does not receive a
 * spurious InsufficientLogException when the global CBVLSN has advanced
 * past its last-known VLSN (regression test for SR 22782).
 */
@Test
public void testUnnecessaryILE() throws InterruptedException {
    /*
     * Ensure that the cleaner does not remove log files thus provoking an
     * ILE, defeating the purpose of this test.
     */
    setEnvConfigParam(EnvironmentParams.ENV_RUN_CLEANER, "false");
    final long rn3maxVLSN = setupTimedOutEnvironment();
    repEnvInfo[0].getRepImpl().getRepNode().recalculateGlobalCBVLSN();
    VLSN gcbvlsn = repEnvInfo[0].getRepImpl().getRepNode().getGlobalCBVLSN();
    // Precondition: the global CBVLSN must have moved past node 3's VLSN.
    assertTrue(gcbvlsn.getSequence() > rn3maxVLSN);
    try {
        repEnvInfo[2].openEnv();
    } catch (InsufficientLogException ile) {
        fail("Unexpected ILE sr22782");
    }
}
func T(err error) error {
if err == nil {
return nil
}
switch err.(type) {
case *Error:
return err.(*Error)
}
pc := make([]uintptr, 10)
runtime.Callers(2, pc)
frames := runtime.CallersFrames(pc)
var st []Stack
for {
f, more := frames.Next()
if !more {
break
}
st = append(st, Stack{
File: formatFileName(f.File),
Line: f.Line,
FuncName: f.Function,
})
}
return &Error{error: err, stackTrace: st, format: dErrorFormat}
} | function | go | 135 |
// Tests the ray against this geometry's bounding volume.
//
// With hitinfo == NULL this is a pure hit/miss query on the bound. With a
// hitinfo, the record is only updated when the bound is hit at a distance
// closer than the current hitinfo->Distance; TriIndex is reset to -1 since
// only the bound (not an individual triangle) was tested here.
bool Geometry::Hit(const Ray& ray, TriHitEvent* hitinfo) const
{
    Box3 bvol;
    float distance;
    Vec3 intersect;
    // No bounding volume available: nothing can be hit.
    if (!GetBound(&bvol))
        return false;
    if (hitinfo == NULL)
        return bvol.Hit(ray, distance, NULL);
    if (bvol.Hit(ray, distance, &intersect) && (distance < hitinfo->Distance))
    {
        // NOTE(review): the Mesh* cast assumes every Geometry used here is
        // actually a Mesh -- confirm against the class hierarchy.
        hitinfo->Target = (Mesh*) this;
        hitinfo->Distance = distance;
        hitinfo->Intersect = intersect;
        hitinfo->TriIndex = -1;
        return true;
    }
    return false;
}
/// <summary>
/// Wraps <paramref name="inst"/> in a generated duck type implementing the
/// given interfaces. Returns null for a null instance.
/// </summary>
/// <param name="inst">Instance to wrap; may be null.</param>
/// <param name="duckTypes">Interfaces the duck type must expose.</param>
/// <param name="serviceProvider">Currently unused by this overload.</param>
public object CreateDuck(object inst, Type[] duckTypes, IServiceProvider serviceProvider = null)
{
    if (inst is null)
    {
        return null;
    }

    var duckType = this.GetOrCreateDuckType(inst.GetType(), duckTypes, null);
    return Activator.CreateInstance(duckType, inst, null);
}
/**
 * Factory hook for producing {@link SchemaFactory} instances for a given
 * schema language.
 */
public abstract class SchemaFactoryLoader {
    /**
     * A do-nothing constructor.
     */
    protected SchemaFactoryLoader() {
    }
    /**
     * Creates a new {@link SchemaFactory} object for the specified
     * schema language.
     *
     * @param schemaLanguage
     *      See <a href="SchemaFactory.html#schemaLanguage">
     *      the list of available schema languages</a>.
     *
     * @throws NullPointerException
     *      If the <code>schemaLanguage</code> parameter is null.
     *
     * @return <code>null</code> if the callee fails to create one.
     */
    public abstract SchemaFactory newFactory(String schemaLanguage);
}
def deconv(self, input_layer, num_filters, filter_size,
           filter_strides=(2,2), padding='SAME',
           activation=None, use_batch_norm=None):
    """Adds a transposed-convolution (deconvolution) layer in NCHW format.

    The output spatial size is the input size multiplied by the stride in
    each dimension. Bias-or-batch-norm and the activation are applied via
    the class helpers.

    Args:
        input_layer: input tensor in NCHW layout (channels at axis 1).
        num_filters: number of output channels.
        filter_size: (height, width) of the kernel.
        filter_strides: per-dimension upsampling factors; default (2, 2).
        padding: 'SAME' or 'VALID', passed to conv2d_transpose.
        activation: activation selector forwarded to self.activate.
        use_batch_norm: forwarded to self._bias_or_batch_norm; None means
            use the class default.

    Returns:
        The activated output tensor.
    """
    # Input channel count and spatial dims (NCHW: axis 1 is channels).
    num_inputs = input_layer.get_shape().as_list()[1]
    ih, iw = input_layer.get_shape().as_list()[2:]
    # -1 batch dim; spatial dims scale with the strides.
    output_shape = [-1, num_filters,
                    ih*filter_strides[0], iw*filter_strides[1]]
    # conv2d_transpose kernels are [h, w, out_channels, in_channels].
    kernel_shape = [filter_size[0], filter_size[1],
                    num_filters, num_inputs]
    strides = [1, 1, filter_strides[0], filter_strides[1]]
    with tf.variable_scope(self._count_layer('deconv')) as scope:
        kernel = self._get_variable('weights', kernel_shape,
                                    input_layer.dtype)
        x = tf.nn.conv2d_transpose(input_layer, kernel, output_shape,
                                   strides, padding=padding,
                                   data_format='NCHW')
        x = self._bias_or_batch_norm(x, scope, use_batch_norm)
        x = self.activate(x, activation)
    return x
def edges(
    self, include_edge_type=False, include_edge_weight=False
) -> Iterable[Any]:
    """Return the graph's edges as (source, target[, type]) tuples.

    When ``include_edge_weight`` is true, a pair of
    ``(edge_list, weights)`` is returned instead of the list alone.
    """
    sources = self._edges.sources
    targets = self._edges.targets
    if include_edge_type:
        types = self._edges.type_of_iloc(slice(None))
        edge_list = list(zip(sources, targets, types))
    else:
        edge_list = list(zip(sources, targets))
    if include_edge_weight:
        return edge_list, self._edges.weights
    return edge_list
/**
 * NOTE(review): this class appears to be a deliberate bug-reproduction
 * fixture (named after an issue id) whose sole purpose is to trigger
 * NullPointerException and NumberFormatException at runtime -- presumably
 * for a static-analysis or bug-detector regression suite. The defects
 * below are intentional and must not be "fixed".
 */
public class Bug1795838 {
    public static void main(String arg[]) {
        Bug1795838 fb = new Bug1795838();
        fb.foo();
    }
    public void foo() {
        int x;
        // Guaranteed NullPointerException
        x = bar(); // NPE due to implicit Integer.intValue()
        System.out.println("X is " + x);
        Integer tmp;
        tmp = bar();
        // Guaranteed NullPointerException
        int i = tmp.intValue(); // NPE
        System.out.println("I is " + i);
        // Guaranteed NumberFormatException
        i = Integer.parseInt(null); // NFE : null
        System.out.println("I is now" + i);
    }
    // Always returns null so the callers above fail as documented.
    public Integer bar() {
        return null;
    }
}
/* Builds a gdb-side closure value for a Modula-3 nested procedure constant.
 *
 * For a top-level procedure (no enclosing procedure block) the original
 * value is returned unchanged. For a nested procedure, the static link is
 * recovered by walking up from the currently selected frame to the frame
 * of the lexically enclosing procedure, and a closure value pairing the
 * code address with that link is constructed.
 *
 * If inf_code_addr_result is non-NULL it receives the procedure's inferior
 * code address (0 on the early-out paths). Non-function values and NULL
 * inputs pass through untouched. */
static struct value *
m3_nested_proc_const_closure (
    struct value * proc_const_value,
    CORE_ADDR * inf_code_addr_result
  )
  { struct type * proc_type;
    struct block * callee_block;
    struct block * callee_parent_proc_block;
    struct frame_info * callee_parent_frame;
    struct frame_info * referring_frame;
    struct value * result;
    CORE_ADDR inf_code_addr;
    CORE_ADDR inf_static_link;
    if ( inf_code_addr_result != NULL ) { * inf_code_addr_result = 0; }
    if ( proc_const_value == NULL ) { return NULL; }
    proc_type = value_type ( proc_const_value );
    if ( proc_type == NULL || TYPE_CODE ( proc_type ) != TYPE_CODE_FUNC )
      { return proc_const_value; }
    /* Treat the function value as pointer-sized for closure purposes. */
    TYPE_LENGTH ( proc_type ) = TARGET_PTR_BIT / TARGET_CHAR_BIT;
    inf_code_addr
      = VALUE_ADDRESS ( proc_const_value ) + value_offset ( proc_const_value );
    if ( inf_code_addr == 0 ) { return proc_const_value; }
    callee_block = block_for_pc ( inf_code_addr );
    callee_parent_proc_block
      = m3_block_proc_block ( BLOCK_SUPERBLOCK ( callee_block ) );
    if ( callee_parent_proc_block == NULL )
      /* Top-level procedure: no static link needed. */
      { result = proc_const_value; }
    else
      { referring_frame = deprecated_safe_get_selected_frame ( );
        /* Find the frame of the enclosing procedure to get its static
           link, then build the closure from code address + link. */
        callee_parent_frame
          = m3_static_ancestor_frame
              ( referring_frame, callee_parent_proc_block, & inf_static_link);
        result
          = m3_build_gdb_proc_closure
              ( proc_type, inf_code_addr, inf_static_link );
      }
    if ( inf_code_addr_result != NULL )
      { * inf_code_addr_result = inf_code_addr; }
    return result;
  }
public class Favorite extends RemoteHost implements java.io.Serializable,
Comparable {
private static final long serialVersionUID = -8742993458243896488L;
/**
* Creates a new instance of Favorite
*/
public Favorite() {
super();
}
@Override
public boolean equals(Object obj) {
if (obj == null || !(obj instanceof Favorite)) {
return false;
}
Favorite that = (Favorite) obj;
return this.name.equalsIgnoreCase(that.name);
}
public int hashcode() {
return name.hashCode();
}
@Override
public int compareTo(Object obj) {
Favorite that = (Favorite) obj;
return this.name.toUpperCase().compareTo(that.name.toUpperCase());
}
} | class | java | 143 |
public static WorkflowManagerWF create(WorkflowManagerResult entity) {
WorkflowManagerWF result = new WorkflowManagerWF();
result.setWorkflowId(entity.getId());
if (entity.getEndTime() != null) {
result.setEndTime(Timestamp.valueOf(entity.getEndTime()));
}
if (entity.getStartTime() != null) {
result.setStartTime(Timestamp.valueOf(entity.getStartTime()));
}
result.setSuccess(entity.getWorkflowSucceddedStatus());
result.setWorkflowName(entity.getWorkflowName());
return result;
} | function | java | 144 |
def authorize(self, token):
    """Validate ``token`` against the API and persist it on success.

    Sends a probe request to the ``actions`` endpoint using the token as a
    Bearer credential. Any ``requests.HTTPError`` raised by ``self.get``
    propagates to the caller unchanged -- the previous
    ``except HTTPError as e: raise e`` block was a no-op re-raise and has
    been removed. On success the token is written to the configuration
    file and cached on the instance.

    Args:
        token: the API token to validate and store.

    Returns:
        str: ``"OK"`` when the token was accepted.
    """
    headers = {'Authorization': "Bearer {0}".format(token)}
    self.get('actions', headers=headers)
    write_token_to_conf(token)
    self.token = token
    return "OK"
/**
 * Refreshes the per-user profile cache from the system profile folder:
 * the cache directory is wiped and replaced with a full copy of the
 * system profiles. If the system folder is missing, the cache is left
 * untouched and the condition is logged.
 */
private void updateProfileCache()
{
    File cache = new File(getUserProfileCacheFolder());
    String profilePath = getSystemProfileFolder(false);
    File profiles = new File(profilePath);
    if (profiles.exists())
    {
        try
        {
            // Delete-then-copy so stale entries do not survive in the cache.
            FileUtils.deleteDirectory(cache);
            LOGGER.info("Copy system profiles from " + profiles.getAbsolutePath() + " to " + cache.getAbsolutePath());
            FileUtils.copyDirectory(profiles, cache);
        } catch (IOException e)
        {
            // Best-effort refresh: failures are logged, not propagated.
            LOGGER.error("Cannot update system profiles", e);
        }
    }
    else
    {
        LOGGER.info(profiles.getAbsolutePath() + " not found to copy to cache");
    }
}
/**
 * Looks up the handler named in event.parameters.action within
 * ActionHandlers and invokes it. Errors are logged; when an errorHandler
 * is supplied its return value is used, otherwise the error is rethrown.
 * When DEBUG is set, the call is timed and the event is logged.
 *
 * @param {Object} event the incoming event, carrying parameters.action
 * @param {Function=} errorHandler optional recovery callback
 * @return {*} the handler's (or errorHandler's) return value
 */
function dispatchActionInternal_(event, errorHandler) {
  if (DEBUG) {
    console.time('dispatchActionInternal');
    console.log(event);
  }
  try {
    var requestedAction = event.parameters.action;
    if (!requestedAction) {
      throw new Error('Missing action name.');
    }
    var handler = ActionHandlers[requestedAction];
    if (!handler) {
      throw new Error('Action not found: ' + requestedAction);
    }
    return handler(event);
  } catch (err) {
    console.error(err);
    if (!errorHandler) {
      throw err;
    }
    return errorHandler(err);
  } finally {
    // Always close the timer, even on the error paths.
    if (DEBUG) {
      console.timeEnd('dispatchActionInternal');
    }
  }
}
/*
 * Looks up the vendor-name string for a USB vendor id in the USBVendorIDs
 * table. The reserved id 0x0000 yields NULL. If the id is not found, the
 * scan stops on the table's 0x0000 sentinel entry and that entry's
 * szVendor is returned -- presumably a placeholder string or NULL;
 * NOTE(review): confirm the sentinel's szVendor contents before relying
 * on a non-NULL result for unknown vendors.
 */
PCHAR
GetVendorString (
    USHORT idVendor
)
{
    PVENDOR_ID vendorID = NULL;
    if (idVendor == 0x0000)
    {
        return NULL;
    }
    /* Linear scan until a match or the 0x0000 sentinel terminator. */
    vendorID = USBVendorIDs;
    while (vendorID->usVendorID != 0x0000)
    {
        if (vendorID->usVendorID == idVendor)
        {
            break;
        }
        vendorID++;
    }
    return (vendorID->szVendor);
}
/// Returns an `(early, late)` pair of media positions by invoking the COM
/// vtable through the raw interface pointer.
///
/// NOTE(review): despite the method's name, this dispatches through the
/// vtable's `GetPositions` entry, not a `GetAvailable` slot -- confirm
/// against the `IMediaSeekingVT` layout that this is the intended call.
///
/// Safety: relies on `self.ptr()` holding a valid `IMediaSeeking`
/// interface pointer whose vtable matches `IMediaSeekingVT`.
fn GetAvailable(&self) -> HrResult<(i64, i64)> {
    let (mut early, mut late) = (i64::default(), i64::default());
    unsafe {
        let vt = &**(self.ptr().0 as *mut *mut IMediaSeekingVT);
        ok_to_hrresult((vt.GetPositions)(self.ptr(), &mut early, &mut late))
    }.map(|_| (early, late))
}
/**
 * Appends a contact node for the given UI contact to this group node,
 * notifies the tree model of the insertion and restores the selection
 * that was active before the structural change.
 *
 * @param uiContact the contact to wrap and add
 * @return the newly created {@code ContactNode}
 */
public ContactNode addContact(UIContactImpl uiContact)
{
    if (logger.isDebugEnabled())
        logger.debug("Group node add contact: "
                + uiContact.getDisplayName());
    // Remember the selection so it can be re-applied after insertion.
    int selectedIndex = getLeadSelectionRow();
    ContactNode contactNode = new ContactNode(uiContact);
    uiContact.setContactNode(contactNode);
    add(contactNode);
    fireNodeInserted(children.size() - 1);
    refreshSelection(selectedIndex, getLeadSelectionRow());
    return contactNode;
}
/**
 * Exception signalling a communication failure, wrapping the underlying
 * cause in the project's {@code WrapperException} hierarchy.
 */
public class CommunicationException extends WrapperException
{
    /***************************************************************************************************************//**
     * Constructor.
     *
     * @param message        human-readable description of the failure
     * @param innerException the underlying cause being wrapped
     ******************************************************************************************************************/
    public CommunicationException(String message, Exception innerException)
    {
        super(message, innerException);
    }
}
/**
 * Verifies that a COMPLETED, NOT_CLEANED submission older than the one-day
 * cleanup threshold is picked up by the scheduled task and handed to the
 * execution service for cleanup.
 */
@Test
public void testCleanupAnalysisSubmissionsCompletedOverOneDaySuccess() throws ExecutionManagerException {
    // Task under test configured with a 1-day age condition.
    analysisExecutionScheduledTask = new AnalysisExecutionScheduledTaskImpl(analysisSubmissionRepository,
            analysisExecutionService, new CleanupAnalysisSubmissionConditionAge(Duration.ofDays(1)),
            galaxyJobErrorsService, jobErrorRepository, emailController, analysisWorkspaceService);
    when(analysisSubmissionMock.getAnalysisState()).thenReturn(AnalysisState.COMPLETED);
    when(analysisSubmissionMock.getAnalysisCleanedState()).thenReturn(AnalysisCleanedState.NOT_CLEANED);
    // Created two days ago, i.e. past the one-day threshold.
    when(analysisSubmissionMock.getCreatedDate()).thenReturn(DateTime.now()
            .minusDays(2)
            .toDate());
    when(analysisSubmissionRepository.findByAnalysisState(AnalysisState.COMPLETED,
            AnalysisCleanedState.NOT_CLEANED)).thenReturn(Arrays.asList(analysisSubmissionMock));
    Set<Future<AnalysisSubmission>> futureSubmissionsSet = analysisExecutionScheduledTask.cleanupAnalysisSubmissions();
    assertEquals("Incorrect size for futureSubmissionsSet", 1, futureSubmissionsSet.size());
    verify(analysisExecutionService).cleanupSubmission(analysisSubmissionMock);
}
def compare_JSON(json1, json2, raise_error=False):
    """Compare two JSON documents (as strings) line by line.

    Bug fix: the original used ``zip``, which stops at the shorter input,
    so two documents sharing a common prefix but differing in length
    compared equal. ``zip_longest`` pads the shorter side with ``None`` so
    a trailing extra line is reported as a difference.

    Args:
        json1: first document.
        json2: second document.
        raise_error: when True, raise at the first differing line instead
            of returning False.

    Returns:
        bool: True when the documents are identical line for line.

    Raises:
        Exception: at the first differing line when ``raise_error`` is True.
    """
    # Local import keeps this change self-contained within the function.
    from itertools import zip_longest

    for linenumber, (line1, line2) in enumerate(
            zip_longest(json1.splitlines(), json2.splitlines())):
        if line1 != line2:
            if raise_error:
                raise Exception("JSON differs at line: " + str(linenumber))
            return False
    return True
/// <summary>
/// Pans the map along <paramref name="direction"/>, scaled by the current
/// zoom level, the pan speed and the frame time.
/// </summary>
/// <param name="direction">Screen-space pan direction.</param>
/// <param name="orientWithMap">
/// When true, the direction is rotated by the map's current Y rotation so
/// the pan follows the map orientation instead of the screen axes.
/// </param>
public void Pan(Vector2 direction, bool orientWithMap)
{
    // Mercator extent halves with each zoom level.
    var mercatorExtent = 2.0 / Math.Pow(2, _map.ZoomLevel - 1);
    var scale = mercatorExtent * _panSpeed * Time.deltaTime;
    if (orientWithMap)
    {
        // Hoisted: sin/cos were previously evaluated twice each inside the
        // vector expression; compute them once per call.
        var rotationRad = _map.transform.localRotation.eulerAngles.y * DegToRad;
        var cos = Math.Cos(rotationRad);
        var sin = Math.Sin(rotationRad);
        Translate(
            scale *
            new Vector2D(
                (direction.x * cos) - (direction.y * sin),
                (direction.x * sin) + (direction.y * cos)));
    }
    else
    {
        Translate(scale * new Vector2D(direction.x, direction.y));
    }
}
def testListComments_UnknownApprovalInFilter(self):
    """Filtering comments by a nonexistent approvalDef returns no comments.

    Adds one approval-bound comment to issue 1, then lists comments with a
    filter referencing approvalDef 404 (which does not exist) and expects
    an empty result rather than an error.
    """
    approval_comment = tracker_pb2.IssueComment(
        id=123,
        issue_id=self.issue_1.issue_id,
        project_id=self.issue_1.project_id,
        user_id=self.owner.user_id,
        content='comment 2 - approval 1',
        approval_id=1)
    self.services.issue.TestAddComment(approval_comment, self.issue_1.local_id)
    # Filter on an approvalDef id that is not present in the project.
    request = issues_pb2.ListCommentsRequest(
        parent=self.issue_1_resource_name, page_size=1,
        filter='approval = "projects/chicken/approvalDefs/404"')
    mc = monorailcontext.MonorailContext(
        self.services, cnxn=self.cnxn, requester=self.owner.email)
    response = self.CallWrapped(self.issues_svcr.ListComments, mc, request)
    self.assertEqual(len(response.comments), 0)
/**
 * Picks a "team lead" from a fixed list of names: the name with the most
 * distinct non-space letters, ties broken by lexicographic order.
 */
public class TeamLeadName {
    /** Prints the selected team lead and its distinct-letter count. */
    public static void maxNonRepetitiveLetters() {
        String[] inputNames = {"kylan charles",
                "andrea meza",
                "strickland raymond",
                "destiney alvarado",
                "raymond strickland"};
        String teamLead = "";
        int maxLength = 0;
        for (String candidate : inputNames) {
            int length = removeDuplicatesAndSpaceAndCountLength(candidate);
            boolean strictlyBetter = length > maxLength;
            // On a tie, the lexicographically smaller name wins.
            boolean tieBreakWin = length == maxLength && candidate.compareTo(teamLead) < 0;
            if (strictlyBetter) {
                maxLength = length;
                teamLead = candidate;
            } else if (tieBreakWin) {
                teamLead = candidate;
            }
        }
        System.out.println("Team Lead is " + teamLead + " and max length is " + maxLength);
    }

    /**
     * Counts the distinct characters of {@code input} after lower-casing
     * and removing all whitespace.
     */
    private static int removeDuplicatesAndSpaceAndCountLength(String input) {
        return (int) input.toLowerCase()
                .replaceAll("\\s", "")
                .chars()
                .distinct()
                .count();
    }
}
/**
 * Asserts that a getter/setter pair round-trips values correctly: setting
 * each of two distinct values must make the getter return that value.
 *
 * @param getter supplier under test
 * @param setter consumer under test
 * @param value  first value to set and read back
 * @param value2 second, different value to set and read back
 * @throws T  if the getter throws
 * @throws T2 if the setter throws
 */
public static <V, T extends Throwable, T2 extends Throwable> void assertValueGetSet(
        ThrowingSupplier<V, T> getter, ThrowingConsumer<V, T2> setter, V value, V value2) throws T, T2{
    setter.accept(value);
    assertEquals(value, getter.get());
    setter.accept(value2);
    assertEquals(value2, getter.get());
}
public boolean add(Where where, int index, E e) {
if (is_full()) {
grow();
}
if (where == Where.MIDDLE && index <= num_elements - 1)
System.out.println("Inserting element at index " + num_elements);
System.arraycopy(elements, index, elements, index + 1, num_elements - index);
elements[index] = e;
num_elements++;
return true;
} | function | java | 158 |
/**
 * Parses one GL-related command-line option into *state.
 *
 * Fix: the guard previously read
 *   (command_id==-1 || (command_id != -1 && num_parameters > 0 && arg2 == NULL))
 * where the second `command_id != -1` test is redundant (it is implied by
 * the short-circuit of the first disjunct) and has been removed.
 *
 * @param state  destination for the parsed settings
 * @param arg1   option name (may be NULL, in which case nothing is consumed)
 * @param arg2   option argument, required when the option takes parameters
 * @return number of arguments consumed (0 on no-match/error, 2 otherwise)
 */
int raspitex_parse_cmdline(RASPITEX_STATE *state,
    const char *arg1, const char *arg2)
{
    int command_id, used = 0, num_parameters;
    if (!arg1)
        return 0;
    command_id = raspicli_get_command_id(cmdline_commands,
        cmdline_commands_size, arg1, &num_parameters);
    /* Reject unknown options, and options missing a required argument. */
    if (command_id == -1 || (num_parameters > 0 && arg2 == NULL))
        return 0;
    switch (command_id)
    {
    case CommandGLWin:
    {
        /* Format: x,y,width,height; fall back to defaults on parse error. */
        int tmp;
        tmp = sscanf(arg2, "%d,%d,%d,%d",
            &state->x, &state->y, &state->width, &state->height);
        if (tmp != 4)
        {
            state->x = state->y = 0;
            state->width = DEFAULT_WIDTH;
            state->height = DEFAULT_HEIGHT;
        }
        else
        {
            state->gl_win_defined = 1;
        }
        used = 2;
        break;
    }
    case CommandGLScene:
    {
        if (strcmp(arg2, "square") == 0)
            state->scene_id = RASPITEX_SCENE_SQUARE;
        else if (strcmp(arg2, "teapot") == 0)
            state->scene_id = RASPITEX_SCENE_TEAPOT;
        else if (strcmp(arg2, "mirror") == 0)
            state->scene_id = RASPITEX_SCENE_MIRROR;
        else if (strcmp(arg2, "yuv") == 0)
            state->scene_id = RASPITEX_SCENE_YUV;
        else if (strcmp(arg2, "sobel") == 0)
            state->scene_id = RASPITEX_SCENE_SOBEL;
        else
            vcos_log_error("Unknown scene %s", arg2);
        used = 2;
        break;
    }
    }
    return used;
}
/*
 * Updates the four PWM channel offsets for the stepper's current
 * micro-step position.
 *
 * The micro-step indices for both windings are folded back into the
 * 0..LVHB_MICROSTEP_CNT_MAX range (indices past the maximum mirror back
 * down, i.e. the waveform's descending half). The per-channel offset is
 * then looked up for the current full-step sequence entry and written to
 * the timer channels of winding 1 (IN1A/IN1B) and winding 2 (IN2A/IN2B).
 */
static void LVHB_SetMicroOffset(lvhb_drv_config_t* const drvConfig)
{
    lvhb_stepper_data_t *stepperData = &(drvConfig->deviceConfig.stepperData);
    uint16_t offsetVal;
    uint8_t idx1;
    uint8_t idx2;
    /* Mirror indices beyond the peak back into the table range. */
    idx1 = (stepperData->microWind1Idx > LVHB_MICROSTEP_CNT_MAX) ?
            (LVHB_MICROSTEP_CNT_MAX * 2 - stepperData->microWind1Idx) :
            stepperData->microWind1Idx;
    idx2 = (stepperData->microWind2Idx > LVHB_MICROSTEP_CNT_MAX) ?
            (LVHB_MICROSTEP_CNT_MAX * 2 - stepperData->microWind2Idx) :
            stepperData->microWind2Idx;
    /* Winding 1: channels IN1A and IN1B. */
    offsetVal = LVHB_GET_MICRO_OFFSET(drvConfig, LVHB_FULLSTEP_CH_IN1A, LVHB_FULLSTEP_SEQ[stepperData->fullStepIdx], idx1);
    TMR_AML_SetOffsetTicks(drvConfig->tmrInstance, drvConfig->tmrLvhbConfig.inxaChannelNumber[0], offsetVal);
    offsetVal = LVHB_GET_MICRO_OFFSET(drvConfig, LVHB_FULLSTEP_CH_IN1B, LVHB_FULLSTEP_SEQ[stepperData->fullStepIdx], idx1);
    TMR_AML_SetOffsetTicks(drvConfig->tmrInstance, drvConfig->tmrLvhbConfig.inxbChannelNumber[0], offsetVal);
    /* Winding 2: channels IN2A and IN2B. */
    offsetVal = LVHB_GET_MICRO_OFFSET(drvConfig, LVHB_FULLSTEP_CH_IN2A, LVHB_FULLSTEP_SEQ[stepperData->fullStepIdx], idx2);
    TMR_AML_SetOffsetTicks(drvConfig->tmrInstance, drvConfig->tmrLvhbConfig.inxaChannelNumber[1], offsetVal);
    offsetVal = LVHB_GET_MICRO_OFFSET(drvConfig, LVHB_FULLSTEP_CH_IN2B, LVHB_FULLSTEP_SEQ[stepperData->fullStepIdx], idx2);
    TMR_AML_SetOffsetTicks(drvConfig->tmrInstance, drvConfig->tmrLvhbConfig.inxbChannelNumber[1], offsetVal);
}
/**
 * Fluent setter for the CSS {@code border-spacing} property.
 *
 * @param borderSpacing the value to apply
 * @return this instance, for chaining
 */
@JsOverlay
@Nonnull
public final CssProps borderSpacing( String borderSpacing )
{
  setBorderSpacing( borderSpacing );
  return this;
}
def moran_process(
    A: npt.NDArray,
    initial_population: npt.NDArray,
) -> Generator[npt.NDArray, None, None]:
    """Yield successive populations of a Moran process until fixation.

    A population that is already homogeneous is yielded once, unchanged.
    Otherwise the population is repeatedly scored and updated, yielding
    each new state, until a single type remains (the final homogeneous
    population is the last value yielded).
    """
    population = initial_population
    if len(set(population)) == 1:
        # Already fixed: emit the initial state once and stop.
        yield population
        return
    while len(set(population)) > 1:
        scores = score_all_individuals(A=A, population=population)
        population = update_population(population=population, scores=scores)
        yield population
def convert_path_to_targetname(include_path):
    """Translate a header include path into a ``quickstep_`` target name.

    Directory components have their underscores removed (the filename
    keeps its own). The filename drops its extension: ``*_gen.hpp`` is
    reduced to its stem, ``*.hpp`` loses the suffix, and ``*.pb.h`` maps
    to ``<stem>_proto``. The pieces are then joined with underscores.
    """
    *dirs, leaf = include_path.split("/")
    dirs = [component.replace("_", "") for component in dirs]
    if leaf.endswith("_gen.hpp"):
        leaf = leaf[:-len("_gen.hpp")]
    elif leaf.endswith(".hpp"):
        leaf = leaf[:-len(".hpp")]
    elif leaf.endswith(".pb.h"):
        leaf = leaf[:-len(".pb.h")] + "_proto"
    return "quickstep_" + "_".join(dirs + [leaf])
class Consumer:
    """Tool for consuming messages generated by the broker.
    Args:
        broker (Broker): an Alsek broker
        subset (List[str], Dict[str, List[str]], optional): subset of messages to consume
            Must be one of the following
            * ``None``: consume messages from all queues and tasks
            * ``list``: a list of queues of the form ``["queue_a", "queue_b", "queue_c", ...]``
            * ``dict``: a dictionary of queues and tasks of the form
              ``{"queue_a": ["task_name_a", "task_name_b", "task_name_c", ...], ...}``
        backoff (Backoff, optional): backoff to use in response to passes over the backend
            which did not yield any actionable messages.
    Notes:
        * If ``subset`` is a ``list`` or ``dict``, queue priority is derived from the
          order of the items. Items which appear earlier are given higher priority.
        * If ``subset`` is a ``dict``, task priority is derived from the order of
          task names in the value associated with each key (queue).
    Warning:
        * If ``subset`` is of type ``dict``, task names not included
          in the any of the values will be ignored.
    """
    def __init__(
        self,
        broker: Broker,
        subset: Optional[Union[List[str], Dict[str, List[str]]]] = None,
        backoff: Optional[Backoff] = LinearBackoff(
            1 * 1000,
            floor=1000,
            ceiling=30_000,
            zero_override=False,
        ),
    ) -> None:
        self.subset = subset
        self.broker = broker
        # A falsy backoff degrades to "no wait" (constant zero backoff).
        self.backoff = backoff or ConstantBackoff(0, floor=0, ceiling=0)
        # Count of consecutive polls that found no actionable message;
        # drives the backoff schedule in stream().
        self._empty_passes: int = 0
        self.stop_signal = StopSignalListener()
    def _scan_subnamespaces(self) -> Iterable[str]:
        """Yield backend key names for every (queue, task) this consumer covers.

        The subnamespace patterns are derived from ``self.subset`` (see the
        class docstring) and scanned in priority order.
        """
        if not self.subset:
            subnamespaces = [self.broker.get_subnamespace(None)]
        elif isinstance(self.subset, list):
            subnamespaces = [self.broker.get_subnamespace(q) for q in self.subset]
        else:
            subnamespaces = [
                self.broker.get_subnamespace(q, task_name=t)
                for (q, tasks) in self.subset.items()
                for t in tasks
            ]
        for s in subnamespaces:
            yield from self.broker.backend.scan(f"{s}*")
    def _poll(self) -> Iterable[Message]:
        """Make one pass over the backend, yielding ready, lockable messages.

        Updates ``_empty_passes``: incremented when the pass yielded no
        ready message, reset to zero otherwise.
        """
        empty: bool = True
        for name in self._scan_subnamespaces():
            message_data = self.broker.backend.get(name)
            if message_data is None:
                # Message data can be None if it is deleted (e.g., by
                # a TTL or worker) between the scan() and get() operations.
                continue
            message = Message(**message_data)
            if message.ready:
                empty = False
                # Only yield messages this consumer successfully locks;
                # non-strict acquire means losing the race is silent.
                with _ConsumptionMutex(message, self.broker.backend) as lock:
                    if lock.acquire(strict=False):
                        yield message._link_lock(lock, override=True)
        self._empty_passes = self._empty_passes + 1 if empty else 0
    def stream(self) -> Iterable[Message]:
        """Generate a stream of messages to process from
        the data backend.
        Returns:
            stream (Iterable[Message]): an iterable of messages to process
        """
        while not self.stop_signal.received:
            for message in self._poll():
                yield message
            # Sleep according to the backoff schedule (interruptible by
            # the stop signal) before the next poll.
            self.stop_signal.wait(self.backoff.get(self._empty_passes))
/// <summary>
/// Designer-generated UI construction for the MusicPlayer form.
/// Fix: the designer's section-separator comments (e.g. "// TopPanel")
/// had lost their "//" markers, leaving bare identifiers that do not
/// compile; the comment markers are restored below.
/// </summary>
private void InitializeComponent()
{
    System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(MusicPlayer));
    this.TopPanel = new System.Windows.Forms.Panel();
    this.label1 = new System.Windows.Forms.Label();
    this.pictureBox1 = new System.Windows.Forms.PictureBox();
    this.listBoxSongs = new System.Windows.Forms.ListBox();
    this.btnPlay = new System.Windows.Forms.Button();
    this.TopPanel.SuspendLayout();
    ((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).BeginInit();
    this.SuspendLayout();
    //
    // TopPanel
    //
    this.TopPanel.BackColor = System.Drawing.Color.Turquoise;
    this.TopPanel.Controls.Add(this.pictureBox1);
    this.TopPanel.Controls.Add(this.label1);
    this.TopPanel.Dock = System.Windows.Forms.DockStyle.Top;
    this.TopPanel.Location = new System.Drawing.Point(0, 0);
    this.TopPanel.Name = "TopPanel";
    this.TopPanel.Size = new System.Drawing.Size(1706, 65);
    this.TopPanel.TabIndex = 0;
    //
    // label1
    //
    this.label1.AutoSize = true;
    this.label1.Font = new System.Drawing.Font("Harlow Solid Italic", 12F, ((System.Drawing.FontStyle)((System.Drawing.FontStyle.Bold | System.Drawing.FontStyle.Italic))), System.Drawing.GraphicsUnit.Point);
    this.label1.Location = new System.Drawing.Point(214, 22);
    this.label1.Name = "label1";
    this.label1.Size = new System.Drawing.Size(135, 25);
    this.label1.TabIndex = 0;
    this.label1.Text = "Music Player";
    //
    // pictureBox1
    //
    this.pictureBox1.Image = ((System.Drawing.Image)(resources.GetObject("pictureBox1.Image")));
    this.pictureBox1.Location = new System.Drawing.Point(1633, 12);
    this.pictureBox1.Name = "pictureBox1";
    this.pictureBox1.Size = new System.Drawing.Size(46, 44);
    this.pictureBox1.SizeMode = System.Windows.Forms.PictureBoxSizeMode.StretchImage;
    this.pictureBox1.TabIndex = 1;
    this.pictureBox1.TabStop = false;
    //
    // listBoxSongs
    //
    this.listBoxSongs.Font = new System.Drawing.Font("HoloLens MDL2 Assets", 10.2F, ((System.Drawing.FontStyle)((System.Drawing.FontStyle.Bold | System.Drawing.FontStyle.Italic))), System.Drawing.GraphicsUnit.Point);
    this.listBoxSongs.FormattingEnabled = true;
    this.listBoxSongs.ItemHeight = 17;
    this.listBoxSongs.Location = new System.Drawing.Point(1295, 71);
    this.listBoxSongs.Name = "listBoxSongs";
    this.listBoxSongs.Size = new System.Drawing.Size(384, 276);
    this.listBoxSongs.TabIndex = 1;
    //
    // btnPlay
    //
    this.btnPlay.BackColor = System.Drawing.Color.Turquoise;
    this.btnPlay.ForeColor = System.Drawing.Color.Black;
    this.btnPlay.Location = new System.Drawing.Point(1295, 364);
    this.btnPlay.Name = "btnPlay";
    this.btnPlay.Size = new System.Drawing.Size(384, 50);
    this.btnPlay.TabIndex = 2;
    this.btnPlay.Text = "Play";
    this.btnPlay.UseVisualStyleBackColor = false;
    //
    // MusicPlayer
    //
    this.AutoScaleDimensions = new System.Drawing.SizeF(12F, 22F);
    this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
    this.BackColor = System.Drawing.Color.DimGray;
    this.ClientSize = new System.Drawing.Size(1706, 495);
    this.Controls.Add(this.btnPlay);
    this.Controls.Add(this.listBoxSongs);
    this.Controls.Add(this.TopPanel);
    this.Font = new System.Drawing.Font("Harlow Solid Italic", 10.2F, ((System.Drawing.FontStyle)((System.Drawing.FontStyle.Bold | System.Drawing.FontStyle.Italic))), System.Drawing.GraphicsUnit.Point);
    this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;
    this.Margin = new System.Windows.Forms.Padding(4, 3, 4, 3);
    this.Name = "MusicPlayer";
    this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
    this.Text = "Music Player";
    this.TopPanel.ResumeLayout(false);
    this.TopPanel.PerformLayout();
    ((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).EndInit();
    this.ResumeLayout(false);
}
/*
 * Registers the reverse-direction ("rdma-bc") transport with the service.
 * Returns 0 on success, or the negative errno from svc_create_xprt().
 */
int xprt_rdma_bc_up(struct svc_serv *serv, struct net *net)
{
	int ret = svc_create_xprt(serv, "rdma-bc", net, PF_INET, 0, 0);

	/* Positive return values are normalized to 0. */
	return ret < 0 ? ret : 0;
}
/// Waits for the first actor task to finish. Actors are expected to run
/// forever, so any completion is abnormal: a task that exits cleanly
/// triggers an outright panic, and a task that panicked has its panic
/// resumed on the current task so the process shuts down with the
/// original payload.
async fn wait_for_tasks(task_futures: Vec<JoinHandle<()>>) {
    match future::select_all(task_futures).await {
        (Ok(_), _, _) => {
            panic!("One of the actors finished its run, while it wasn't expected to do it");
        }
        (Err(error), _, _) => {
            log::warn!("One of the tokio actors unexpectedly finished, shutting down");
            if error.is_panic() {
                // Resume the panic on the main task
                std::panic::resume_unwind(error.into_panic());
            }
        }
    }
}
def symm_fz_hkl(l_csl_props, hkl_inds):
    """Map boundary-plane (h k l) indices into the fundamental zone.

    The indices are converted to unit normals in the po frame, reduced to
    the fundamental zone of the bicrystal point-group symmetry, de-duplicated
    and converted back to integer (h k l) triples via the reciprocal lattice.

    Args:
        l_csl_props: dict with keys 'l_csl_po' (CSL basis in po frame),
            'symm_grp_ax' and 'bp_symm_grp' (boundary-plane symmetry data).
        hkl_inds: array of Miller indices to reduce.

    Returns:
        (m, 3) float array of integer-valued fundamental-zone (h k l) rows.
    """
    l_p_po = l_csl_props['l_csl_po']
    norm_uvec = conv_hkl_uvecs(hkl_inds, l_p_po)
    symm_grp_ax = l_csl_props['symm_grp_ax']
    bp_symm_grp = l_csl_props['bp_symm_grp']
    # Reciprocal basis is needed to go back from plane normals to (h k l).
    l_rp_po = np.array(fcd.reciprocal_mat(l_p_po), dtype='double')
    bp_fz_norms_go1, bp_fz_stereo = pfb.pick_fz_bpl(norm_uvec, bp_symm_grp, symm_grp_ax, x_tol=1e-04)
    # Symmetry reduction can map several inputs to one normal; keep uniques.
    nv_unq = gbt.unique_rows_tol(bp_fz_norms_go1)
    l_po_rp = np.linalg.inv(l_rp_po)
    num1 = np.shape(nv_unq)[0]
    hkl_inds1 = np.zeros((num1, 3))
    for ct1 in range(num1):
        n1_po = nv_unq[ct1]
        n1_rp = np.dot(l_po_rp,n1_po)
        # Approximate the real-valued normal by the nearest integer triple.
        T1, tm1 = iman.int_approx(n1_rp)
        hkl_inds1[ct1,:] = np.array(T1.reshape(1,3), dtype='double')
    return hkl_inds1
/**
 * Aggregates exceptions raised on remote cluster members into a single
 * exception whose stack trace contains the local trace followed by each
 * remote cause's trace, separated by a synthetic marker frame.
 */
@Deprecated
public class RemoteMapReduceException
        extends HazelcastException {
    public RemoteMapReduceException(String message, List<Exception> remoteCauses) {
        super(message);
        setStackTraceElements(remoteCauses);
    }
    /**
     * Rebuilds this exception's stack trace: local frames first, then for
     * each remote cause a marker frame followed by its (indented) frames.
     */
    private void setStackTraceElements(List<Exception> remoteCauses) {
        StackTraceElement[] originalElements = super.getStackTrace();
        // Pre-size: local frames + per-cause frames + one marker each.
        int stackTraceSize = originalElements.length;
        for (Exception remoteCause : remoteCauses) {
            stackTraceSize += remoteCause.getStackTrace().length + 1;
        }
        StackTraceElement[] elements = new StackTraceElement[stackTraceSize];
        System.arraycopy(originalElements, 0, elements, 0, originalElements.length);
        int pos = originalElements.length;
        for (Exception remoteCauses2 : remoteCauses) {
            Exception remoteCause = remoteCauses2;
            StackTraceElement[] remoteStackTraceElements = remoteCause.getStackTrace();
            // Synthetic separator frame carrying the remote message.
            elements[pos++] = new StackTraceElement("--- Remote Exception: " + remoteCause.getMessage() + " ---", "", null, 0);
            for (int i = 0; i < remoteStackTraceElements.length; i++) {
                StackTraceElement element = remoteStackTraceElements[i];
                // Leading spaces visually indent remote frames in printouts.
                String className = "   " + element.getClassName();
                String methodName = element.getMethodName();
                String fileName = element.getFileName();
                elements[pos++] = new StackTraceElement(className, methodName, fileName, element.getLineNumber());
            }
        }
        setStackTrace(elements);
    }
}
/**
 * Extracts preselected-topic request parameters and populates the view model
 * and the set of parameters that must survive a filter reset.
 *
 * The editor topic can arrive either as a numeric ID ("edPreselectedTopicId")
 * or as a topic alias ("edPreselectedTopic"); the alias is only consulted when
 * no valid ID was supplied. Filter topics are handled analogously via
 * {@code getTopicIds}.
 *
 * @param request the current HTTP request to read parameters from
 * @param model the view model to populate
 * @param unresetableParams parameters that should be kept when filters are reset
 */
private void parseTopicParameters(HttpServletRequest request, Map<String, Object> model,
        Map<String, Object> unresetableParams) {
    Long edTopicId = ParameterHelper.getParameterAsLong(request.getParameterMap(),
            "edPreselectedTopicId");
    // Non-positive IDs are treated as "not provided".
    if (edTopicId != null && edTopicId <= 0) {
        edTopicId = null;
    }
    if (edTopicId == null) {
        // Fall back to resolving the topic by its alias.
        String alias = request.getParameter("edPreselectedTopic");
        if (StringUtils.isNotBlank(alias)) {
            edTopicId = getBlogManagement().getBlogId(alias);
            if (edTopicId == null) {
                LOGGER.debug("Topic with alias {} does not exist", alias);
            }
        }
    }
    if (edTopicId != null) {
        model.put("editorTopicId", edTopicId);
    }
    List<Long> fiTopicIds = getTopicIds(request, "fiPreselectedTopicIds", "fiPreselectedTopics");
    // If the single filter topic equals the editor topic, switch the view
    // into the dedicated "topicSelected" context and pin the topic filter.
    if (edTopicId != null && fiTopicIds.size() == 1 && fiTopicIds.get(0).equals(edTopicId)) {
        model.put(MODEL_FIELD_CONTEXT_ID, "topicSelected");
        unresetableParams.put(nameProvider.getNameForTargetBlogId(), edTopicId);
        unresetableParams.put(nameProvider.getNameForIncludeChildTopics(), true);
    } else {
        // Otherwise pin either the single filter topic or the full ID list.
        if (fiTopicIds.size() == 1) {
            unresetableParams.put(nameProvider.getNameForTargetBlogId(), fiTopicIds.get(0));
        } else if (fiTopicIds.size() > 1) {
            unresetableParams.put(nameProvider.getNameForBlogIds(), toStringArray(fiTopicIds));
        }
    }
}
/*
 * Delete every veth device owned by the controller. All devices are
 * attempted even if earlier deletions fail.
 *
 * Returns 0 on success, -1 if deleting any device failed.
 */
static int virLXCControllerDeleteInterfaces(virLXCControllerPtr ctrl)
{
    int status = 0;
    size_t i;

    for (i = 0; i < ctrl->nveths; i++)
        if (virNetDevVethDelete(ctrl->veths[i]) < 0)
            status = -1;

    return status;
}
def apply_filters(
    self, index: int = 1, order: Qt.SortOrder = Qt.SortOrder.AscendingOrder
) -> None:
    """Re-filter and re-sort the visible items, keeping the selection stable.

    Items must pass every filter in ``FILTERS`` (each paired with its input
    widgets) to remain visible. The surviving items are sorted by the
    property function at position ``index`` of ``TableModel.PROPERTY_FUNCS``.
    If the previously selected item survives filtering, its row is
    re-selected; otherwise the selection is cleared.
    """
    selection = self.table_view.selectedIndexes()
    previously_selected = (
        self.current_items[selection[0].row()] if selection else None
    )

    def _passes_all(item):
        # An item is visible only when every filter accepts it.
        return all(
            flt.filter_func(item, *widgets)
            for flt, widgets in zip(FILTERS, self.widgets)
        )

    self.current_items = [item for item in self.items if _passes_all(item)]

    sort_key = list(TableModel.PROPERTY_FUNCS)[index]
    self.current_items.sort(
        key=TableModel.PROPERTY_FUNCS[sort_key],
        reverse=(order == Qt.SortOrder.DescendingOrder),
    )

    if previously_selected is not None:
        if previously_selected in self.current_items:
            self.table_view.selectRow(self.current_items.index(previously_selected))
        else:
            self.table_view.clearSelection()

    self.layoutChanged.emit()
def apply_sep(self, allow_points_outside_coverage: bool = False):
    """Apply the computed vdatum separation model to the raster layers.

    Requires get_datum_sep to have been run first (populates
    self.raster_vdatum_sep / self.raster_vdatum_uncertainty).

    Parameters
    ----------
    allow_points_outside_coverage : bool
        If True, elevation points outside the sep-model coverage are kept
        (sign-flipped only) and given a CATZOC-D style uncertainty; if
        False, such points are set to the nodata value.

    Returns
    -------
    tuple
        (layers, layernames, layernodata) where layers is
        (elevation, uncertainty, contributor-or-None).
    """
    if self.raster_vdatum_sep is None:
        self.log_error('Unable to find sep model, make sure you run get_datum_sep first', ValueError)
    # Locate the source layers; elevation is mandatory, the others optional.
    elevation_layer_idx = self._get_elevation_layer_index()
    uncertainty_layer_idx = self._get_uncertainty_layer_index()
    contributor_layer_idx = self._get_contributor_layer_index()
    if elevation_layer_idx is None:
        self.log_error('Unable to find elevation layer', ValueError)
    if uncertainty_layer_idx is None:
        self.log_info('Unable to find uncertainty layer, uncertainty will be entirely based off of vdatum sep model')
    elevation_layer = self.layers[elevation_layer_idx]
    layernames = [self.layernames[elevation_layer_idx]]
    layernodata = [self.nodatavalue[elevation_layer_idx]]
    uncertainty_layer = None
    contributor_layer = None
    if uncertainty_layer_idx is not None:
        uncertainty_layer = self.layers[uncertainty_layer_idx]
        layernames.append(self.layernames[uncertainty_layer_idx])
        layernodata.append(self.nodatavalue[uncertainty_layer_idx])
        had_uncertainty = True
    else:
        # No source uncertainty: synthesize an uncertainty slot and point
        # uncertainty_layer_idx at it (reuses the elevation nodata value).
        layernames.append('Uncertainty')
        uncertainty_layer_idx = len(layernodata)
        layernodata.append(self.nodatavalue[elevation_layer_idx])
        had_uncertainty = False
    if contributor_layer_idx is not None:
        contributor_layer = self.layers[contributor_layer_idx]
        layernames.append(self.layernames[contributor_layer_idx])
        layernodata.append(self.nodatavalue[contributor_layer_idx])
    # Cells with no elevation data, and cells with elevation data but no
    # sep-model coverage ("missing").
    elev_nodata = np.isnan(elevation_layer)
    elev_nodata_idx = np.where(elev_nodata)
    missing = np.isnan(self.raster_vdatum_sep)
    missing_idx = np.where(missing & ~elev_nodata)
    missing_count = len(missing_idx[0])
    self.log_info(f'Applying vdatum separation model to {self.raster_vdatum_sep.size} total points')
    # flip converts between height-positive-up and depth-positive-down
    # conventions when input and output CRS disagree.
    if self.in_crs.is_height == self.out_crs.is_height:
        flip = 1
    else:
        flip = -1
    if self.in_crs.is_height == True:
        final_elevation_layer = flip * (elevation_layer + self.raster_vdatum_sep)
    else:
        final_elevation_layer = flip * (elevation_layer - self.raster_vdatum_sep)
    final_elevation_layer[elev_nodata_idx] = layernodata[elevation_layer_idx]
    if had_uncertainty:
        final_uncertainty_layer = uncertainty_layer + self.raster_vdatum_uncertainty
    else:
        final_uncertainty_layer = self.raster_vdatum_uncertainty
    final_uncertainty_layer[elev_nodata_idx] = layernodata[uncertainty_layer_idx]
    if contributor_layer is not None:
        # NOTE(review): layernodata is indexed here with the ORIGINAL layer
        # index; this assumes the source layer order matches the rebuilt
        # list order — confirm for rasters with non-standard layer order.
        contributor_layer[elev_nodata_idx] = layernodata[contributor_layer_idx]
    if missing_count > 0:
        if allow_points_outside_coverage:
            self.log_info(f'Allowing {missing_count} points that are outside of vdatum coverage.')
            final_elevation_layer[missing_idx] = flip * elevation_layer[missing_idx]
            # Depth as positive-down for the CATZOC-style uncertainty formula.
            if self.in_crs.is_height:
                z_values = elevation_layer[missing_idx]
            else:
                z_values = -elevation_layer[missing_idx]
            # CATZOC D style vertical uncertainty: 3 m + 6% of depth
            # (clamped to 3 m for above-datum points).
            u_values = 3 - 0.06 * z_values
            u_values[np.where(z_values > 0)] = 3.0
            if had_uncertainty:
                # Keep the source uncertainty where it exceeds the formula.
                keep_uncert_sub_idx = np.where(u_values < uncertainty_layer[missing_idx])[0]
                if len(keep_uncert_sub_idx) > 0:
                    self.log_info(f'Maintaining {len(keep_uncert_sub_idx)} points from source uncertainty since greater than CATZOC D vertical uncertainty.')
                    u_values[keep_uncert_sub_idx] = uncertainty_layer[missing_idx[0][keep_uncert_sub_idx], missing_idx[1][keep_uncert_sub_idx]]
            final_uncertainty_layer[missing_idx] = u_values
        else:
            self.log_info(f'applying nodatavalue to {missing_count} points that are outside of vdatum coverage')
            final_elevation_layer[missing_idx] = layernodata[elevation_layer_idx]
            final_uncertainty_layer[missing_idx] = layernodata[uncertainty_layer_idx]
            if contributor_layer is not None:
                contributor_layer[missing_idx] = layernodata[contributor_layer_idx]
    layers = (final_elevation_layer, final_uncertainty_layer, contributor_layer)
    return layers, layernames, layernodata
/// <summary>
/// Queries the YTS "list_movies" API endpoint and deserializes the result.
/// </summary>
/// <param name="format">Response format requested from the API.</param>
/// <param name="limit">Results per page; must be 1-50 inclusive.</param>
/// <param name="page">1-based page number.</param>
/// <param name="quality">Video quality filter.</param>
/// <param name="minimumRating">Minimum IMDb rating; must be 0-9 inclusive.</param>
/// <param name="queryTerm">Free-text search term (URL-escaped before sending).</param>
/// <param name="genre">Genre filter, "ALL" for no filter.</param>
/// <param name="sortBy">Sort field.</param>
/// <param name="orderBy">Sort direction.</param>
/// <returns>The parsed API response.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown when limit or minimumRating is out of range.</exception>
/// <exception cref="WebException">Thrown when the HTTP request fails; the original failure is preserved as the inner exception.</exception>
public static ApiResponse<ListMoviesData> ListMovies(Format format = Format.JSON, byte limit = 20, uint page = 1,
    Quality quality = Quality.ALL, byte minimumRating = 0, string queryTerm = "", string genre = "ALL",
    Sort sortBy = Sort.DateAdded, Order orderBy = Order.Descending)
{
    if (limit > 50 || limit < 1)
        throw new ArgumentOutOfRangeException("limit", limit, "Limit must be between 1 - 50 (inclusive).");
    if (minimumRating > 9)
        throw new ArgumentOutOfRangeException("minimumRating", minimumRating,
            "Must be between 0 - 9 (inclusive).");
    // Escape the user-supplied search term so spaces, '&', '=' etc. cannot
    // corrupt the query string.
    string apiReq =
        string.Format(
            "limit={0}&page={1}&quality={2}&minimum_rating={3}&query_term={4}&genre={5}&sort_by={6}&order_by={7}",
            limit, page, ParseQuality(quality), minimumRating, Uri.EscapeDataString(queryTerm ?? string.Empty),
            genre, ParseSort(sortBy), ParseOrder(orderBy));
    try
    {
        WebRequest request = WebRequest.Create(string.Format("https://yts.to/api/v2/list_movies.{0}?{1}",
            ParseFormat(format), apiReq));
        // Dispose the response (not just the reader) so the connection is
        // released even if reading fails part-way.
        using (WebResponse response = request.GetResponse())
        using (Stream stream = response.GetResponseStream())
        using (StreamReader sr = new StreamReader(stream))
        {
            return new ApiResponse<ListMoviesData>(JsonConvert.DeserializeObject<ApiResponseRaw>(sr.ReadToEnd()));
        }
    }
    catch (WebException ex)
    {
        // Preserve the real failure (HTTP error, timeout, DNS, ...) instead
        // of mislabeling every web error as a missing internet connection.
        throw new WebException("Request to the YTS API failed: " + ex.Message, ex);
    }
}
/**
 * \brief Translate a BCM cosq resource enum into the internal
 *        DRAM-bound SRAM resource type used by this module.
 *
 * \param [in] unit - unit id
 * \param [in] resource - BCM cosq resource to translate
 * \param [out] internal_resource - matching internal resource type;
 *        set to DNX_INGRESS_CONGESTION_DRAM_BOUND_RESOURCE_INVALID for
 *        any unrecognized resource (no error is raised in that case)
 *
 * \return shr_error_e
 */
static shr_error_e
dnx_ingress_congestion_dram_internal_resource_type_get(
    int unit,
    bcm_cosq_resource_t resource,
    dnx_ingress_congestion_dram_bound_resource_type_e * internal_resource)
{
    SHR_FUNC_INIT_VARS(unit);

    /* Default to INVALID; unmapped resources fall through untouched. */
    *internal_resource = DNX_INGRESS_CONGESTION_DRAM_BOUND_RESOURCE_INVALID;

    switch (resource)
    {
        case bcmResourceOcbBytes:
            *internal_resource = DNX_INGRESS_CONGESTION_DRAM_BOUND_RESOURCE_SRAM_BYTES;
            break;
        case bcmResourceOcbBuffers:
            *internal_resource = DNX_INGRESS_CONGESTION_DRAM_BOUND_RESOURCE_SRAM_BUFFERS;
            break;
        case bcmResourceOcbPacketDescriptors:
            *internal_resource = DNX_INGRESS_CONGESTION_DRAM_BOUND_RESOURCE_SRAM_PDS;
            break;
        default:
            break;
    }

    SHR_FUNC_EXIT;
}
/**
 * Converts a Distance to a double in the requested coordinate units.
 *
 * @param dist  The distance to convert.
 * @param units Target units; only Meters and Kilometers are supported.
 *
 * @return The distance value expressed in the requested units.
 *
 * @throws IException::Programmer for any unsupported unit.
 */
double SurfacePoint::DistanceToDouble(Distance dist, CoordUnits units) {
    switch (units) {
        case Meters:
            return dist.meters();

        case Kilometers:
            return dist.kilometers();

        default: {
            IString msg = "Unrecognized unit for a Distance (not meters or kilometers).";
            throw IException(IException::Programmer, msg, _FILEINFO_);
        }
    }
}
def record_treatment_stats(self, impressions, latency, operation):
    """Store processed impressions and, when stored, record the call latency.

    Any failure is logged and swallowed so that stats recording can never
    break the caller's treatment evaluation.
    """
    try:
        processed = self._impressions_manager.process_impressions(impressions)
        stored = self._impression_storage.put(processed)
        if stored:
            self._telemetry_storage.inc_latency(operation, latency)
    except Exception:
        _LOGGER.error('Error recording impressions and metrics')
        _LOGGER.debug('Error: ', exc_info=True)
// Strips quoted string literals, regex-like '/.../'-delimited spans and
// comments from a single line of source text, returning the remaining code.
// Handles backslash-escaped closing delimiters and recurses to process the
// rest of the line after a closed literal.
// NOTE(review): for a '/*' sequence the whole remaining line is kept as-is
// (result=line) rather than stripped — presumably block comments are
// handled by the caller across lines; confirm against call sites.
function cleanup(line){
	// Candidate opening delimiters; entries become [char, index] pairs for
	// delimiters actually present, others are removed.
	var indexes=['\'', '\"', '/'],
		result=line;
	for(var i=0; i<indexes.length; ++i){
		var ind=line.indexOf(indexes[i]);
		if(ind<0)
			indexes.splice(i--, 1);
		else
			indexes[i]=[indexes[i], ind];
	}
	if(indexes.length){
		// Process the earliest delimiter on the line first.
		indexes.sort(function (a, b){ return a[1]-b[1]})
		var quoteChar=indexes[0][0],
			closingQuoteCharSeq=quoteChar,
			ind=indexes[0][1];
		// Keep everything before the opening delimiter.
		result=line.substring(0, ind);
		if(quoteChar=='/'){
			var nextChar=line.charAt(ind+1);
			if(nextChar=='/')
				// '//' line comment: drop the rest of the line.
				line='';
			else if(nextChar=='*'){
				// '/*' block comment start: keep the line unchanged.
				result=line;
				line='';
			}
		}
		if(line.length){
			// Scan for the matching closing delimiter, skipping ones that
			// are preceded by an odd number of backslashes (escaped).
			line=line.substring(ind+quoteChar.length);
			var closingInd=-1;
			do{
				closingInd=line.indexOf(closingQuoteCharSeq);
				if(closingInd>0){
					// Count the run of backslashes before the delimiter.
					var escapes=1;
					while(line.charAt(closingInd-escapes)=='\\')
						++escapes;
					--escapes;
					if(escapes%2){
						// Odd number: delimiter is escaped, keep scanning.
						line=line.substring(closingInd+1);
						closingInd=-1;
					}
				}else
					break;
			}while(closingInd<0 && line.length);
			if(closingInd==-1)
				// Unterminated literal: nothing usable remains.
				return '';
			else
				// Recurse on the remainder after the closing delimiter.
				result+=cleanup(line.substring(closingInd+closingQuoteCharSeq.length));
		}
	}
	return result;
}
def hebbsom_predict_full(X, Y, mdl):
    """Run the Hebbian-SOM model over every row of X and collect diagnostics.

    Parameters
    ----------
    X : numpy.ndarray
        Input samples, one per row.
    Y : numpy.ndarray
        Target array; only its shape/dtype is used to size the predictions.
    mdl : object
        Model exposing ``predict(x)`` and a ``filter_e`` member with
        ``distances(x)`` and an ``activity`` attribute.

    Returns
    -------
    tuple
        (predictions, distances, activities) where predictions has the shape
        of Y, and distances/activities are per-sample flattened arrays.
    """
    distances = []
    activities = []
    predictions = np.zeros_like(Y)
    for h in range(X.shape[0]):
        X_ = X[h]
        predictions[h] = mdl.predict(X_)
        distances.append(mdl.filter_e.distances(X_).flatten())
        activities.append(mdl.filter_e.activity.flatten())
    # Note: the original code computed activities[-1].argsort() into an
    # unused local; it had no effect and has been removed.
    return (predictions, distances, activities)
/**
 * Starts (or restarts) the office process and attempts to connect to it,
 * retrying at the configured interval until the configured timeout elapses.
 *
 * @param restart whether this start is a restart of an existing process
 * @throws OfficeException if the process cannot be started or a connection
 *     cannot be established
 */
private void doStartProcessAndConnect(final boolean restart) throws OfficeException {
    process.start(restart);
    try {
        new ConnectRetryable(process, connection)
            .execute(config.getProcessRetryInterval(), config.getProcessTimeout());
    } catch (OfficeException ex) {
        // Already the exception type callers expect; rethrow untouched.
        throw ex;
    } catch (Exception ex) {
        throw new OfficeException("Could not establish connection", ex);
    }
}
func GetCount(ctx context.Context, k *datastore.Key) (c int, err error) {
q := datastore.NewQuery("PageContext")
if k.Kind() == "Page" {
q = q.
Ancestor(k)
} else {
q = q.Filter("ContextKey =", k)
}
c, err = q.Count(ctx)
return
} | function | go | 181 |
public class BufferedFileDataInput {
private static Logger logger = Logger.getLogger(BufferedFileDataInput.class);
// Read parameters.
private File file;
private int size;
// Variables to control reading.
private FileInputStream fileInput;
private BufferedInputStream bufferedInput;
private DataInputStream dataInput;
private long offset;
private long markOffset;
private int available;
/**
* Creates instance positioned on start of file.
*
* @param file
* File from which to read
* @param size
* Size of buffer for buffered I/O
*/
public BufferedFileDataInput(File file, int size) throws FileNotFoundException, IOException, InterruptedException {
this.file = file;
this.size = size;
seek(0);
}
/**
* Creates instance with default buffer size.
*/
public BufferedFileDataInput(File file) throws FileNotFoundException, IOException, InterruptedException {
this(file, 1024);
}
/**
* Returns the current offset position.
*/
public long getOffset() {
return offset;
}
/**
* Query the stream directly for the number of bytes available for immediate
* read without blocking. This operation may result in a file system
* metadata call. To find out if a specific number of bytes are known to be
* available use waitForAvailable().
*
* @return Number of bytes available for non-blocking read
*/
public int available() throws IOException {
available = bufferedInput.available();
return available;
}
/**
* Waits for a specified number of bytes to be available for a non-blocking
* read.
*
* @param requested
* Number of bytes to read
* @param waitMillis
* Milliseconds to wait before timeout
* @return Number of bytes available for non-blocking read
* @throws IOException
* Thrown if there is a problem checking for available bytes
* @throws InterruptedException
* Thrown if we are interrupted while waiting
*/
public int waitAvailable(int requested, int waitMillis) throws IOException, InterruptedException {
// If we know there is already enough data to read, return immediately.
if (available >= requested)
return available;
// Since there is not enough, wait until we see enough data to do a read
// or exceed the timeout.
long timeoutMillis = System.currentTimeMillis() + waitMillis;
while (available() < requested && System.currentTimeMillis() < timeoutMillis) {
Thread.sleep(500);
if (logger.isDebugEnabled())
logger.debug("Sleeping for 500 ms");
}
// Return true or false depending on whether we found the data.
return available;
}
/**
* Mark stream to read up to limit.
*
* @param readLimit
* Number of bytes that may be read before resetting
*/
public void mark(int readLimit) {
markOffset = offset;
bufferedInput.mark(readLimit);
}
/**
* Reset stream back to last mark.
*
* @throws IOException
* Thrown if mark has been invalidated or not set
* @throws InterruptedException
* Thrown if we are interrupted
*/
public void reset() throws IOException, InterruptedException {
try {
bufferedInput.reset();
offset = markOffset;
} catch (IOException e) {
// Need to seek directly as mark is invalidated.
this.seek(markOffset);
}
markOffset = -1;
}
/**
* Skip requested number of bytes.
*
* @param bytes
* Number of bytes to skip
* @return Number of bytes actually skipped
* @throws IOException
* Thrown if seek not supported or other error
*/
public long skip(long bytes) throws IOException {
long bytesSkipped = bufferedInput.skip(bytes);
offset += bytesSkipped;
available -= bytesSkipped;
return bytesSkipped;
}
/**
* Seek to a specific offset in the file.
*
* @param seekBytes
* Number of bytes from start of file
* @throws IOException
* Thrown if offset cannot be found
* @throws FileNotFoundException
* Thrown if file is not found
* @throws InterruptedException
* Thrown if thread is interrupted
*/
public void seek(long seekBytes) throws FileNotFoundException, IOException, InterruptedException {
fileInput = new FileInputStream(file);
try {
fileInput.getChannel().position(seekBytes);
} catch (ClosedByInterruptException e) {
// NIO rewrites InterruptException into this, which seems broken.
// To preserve interrupt handling behavior up the stack, we throw
// InterruptException.
throw new InterruptedException();
}
bufferedInput = new BufferedInputStream(fileInput, size);
dataInput = new DataInputStream(bufferedInput);
offset = seekBytes;
markOffset = -1;
available = 0;
}
/**
* Reads a single byte.
*/
public byte readByte() throws IOException {
byte v = dataInput.readByte();
offset += 1;
available -= 1;
return v;
}
/** Reads a single short. */
public short readShort() throws IOException {
short v = dataInput.readShort();
offset += 2;
available -= 2;
return v;
}
/** Read a single integer. */
public int readInt() throws IOException {
int v = dataInput.readInt();
offset += 4;
available -= 4;
return v;
}
/** Reads a single long. */
public long readLong() throws IOException {
long v = dataInput.readLong();
offset += 8;
available -= 8;
return v;
}
/**
* Reads a full byte array completely.
*
* @throws IOException
* Thrown if full byte array cannot be read
*/
public void readFully(byte[] bytes) throws IOException {
readFully(bytes, 0, bytes.length);
}
/**
* Reads a full byte array completely.
*
* @param buf
* Buffer into which to read
* @param start
* Starting byte position
* @param len
* Number of bytes to read
* @throws IOException
* Thrown if data cannot be read
*/
public void readFully(byte[] bytes, int start, int len) throws IOException {
dataInput.readFully(bytes, start, len);
offset += len;
available -= len;
}
/** Close and release all resources. */
public void close() {
try {
dataInput.close();
} catch (IOException e) {
logger.warn("Unable to close buffered file reader: file=" + file.getName() + " exception=" + e.getMessage());
}
fileInput = null;
bufferedInput = null;
dataInput = null;
offset = -1;
available = 0;
}
/**
* Print contents of the reader.
*/
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append(this.getClass().getSimpleName());
sb.append(" file=").append(file.getName());
sb.append(" size=").append(size);
sb.append(" offset=").append(offset);
return sb.toString();
}
} | class | java | 182 |
/// <summary>
/// Centralizes the RegexOptions used throughout the library, adding
/// RegexOptions.Compiled only on platforms that support it.
/// </summary>
internal static class InternalRegexOptions
{
    private static readonly RegexOptions RegexCompiledOption;

    static InternalRegexOptions()
    {
#if PORTABLE
        // Portable profiles may lack Compiled; probe for it by name.
        if (!Enum.TryParse("Compiled", out RegexCompiledOption))
            RegexCompiledOption = RegexOptions.None;
#else
        RegexCompiledOption = RegexOptions.Compiled;
#endif
    }

    /// <summary>
    /// Default options: culture-invariant matching plus Compiled where available.
    /// </summary>
    public static RegexOptions Default
    {
        get { return RegexOptions.CultureInvariant | RegexCompiledOption; }
    }
}
/**
 * Builder for wrapped attribute snapshots. Collects the base value, the
 * attribute key, the parent UPDATE_ATTRIBUTES packet and the modifiers,
 * then constructs the underlying NMS AttributeSnapshot reflectively.
 */
public static class Builder {
    // NaN marks "not set"; build() rejects an unset base value.
    private double baseValue = Double.NaN;
    private String attributeKey;
    private PacketContainer packet;
    private Collection<WrappedAttributeModifier> modifiers = Collections.emptyList();

    // When given a template, start from its current state.
    private Builder(WrappedAttribute template) {
        if (template != null) {
            baseValue = template.getBaseValue();
            attributeKey = template.getAttributeKey();
            packet = template.getParentPacket();
            modifiers = template.getModifiers();
        }
    }

    /**
     * Change the base value of the attribute.
     * <p>
     * The modifiers will automatically supply a value if this is unset.
     * @param baseValue - the base value value.
     * @return This builder, for chaining.
     */
    public Builder baseValue(double baseValue) {
        this.baseValue = checkDouble(baseValue);
        return this;
    }

    /**
     * Set the unique attribute key that identifies its function.
     * <p>
     * This is required.
     * @param attributeKey - the unique attribute key.
     * @return This builder, for chaining.
     */
    public Builder attributeKey(String attributeKey) {
        this.attributeKey = Preconditions.checkNotNull(attributeKey, "attributeKey cannot be NULL.");
        return this;
    }

    /**
     * Set the modifers that will be supplied to the client, and used to compute the final value.
     * @param modifiers - the attribute modifiers.
     * @return This builder, for chaining.
     */
    public Builder modifiers(Collection<WrappedAttributeModifier> modifiers) {
        this.modifiers = Preconditions.checkNotNull(modifiers, "modifiers cannot be NULL - use an empty list instead.");
        return this;
    }

    /**
     * Set the parent update attributes packet (44).
     * @param packet - the parent packet.
     * @return This builder, for chaining.
     */
    public Builder packet(PacketContainer packet) {
        if (Preconditions.checkNotNull(packet, "packet cannot be NULL").getType() != PacketType.Play.Server.UPDATE_ATTRIBUTES) {
            throw new IllegalArgumentException("Packet must be UPDATE_ATTRIBUTES (44)");
        }
        this.packet = packet;
        return this;
    }

    /**
     * Retrieve the unwrapped modifiers.
     * @return Unwrapped modifiers.
     */
    private Set<Object> getUnwrappedModifiers() {
        Set<Object> output = Sets.newHashSet();
        for (WrappedAttributeModifier modifier : modifiers) {
            output.add(modifier.getHandle());
        }
        return output;
    }

    /**
     * Build a new wrapped attribute with the values of this builder.
     * @return The wrapped attribute.
     * @throws RuntimeException If anything went wrong with the reflection.
     */
    public WrappedAttribute build() {
        Preconditions.checkNotNull(packet, "packet cannot be NULL.");
        Preconditions.checkNotNull(attributeKey, "attributeKey cannot be NULL.");

        // Remember to set the base value
        if (Double.isNaN(baseValue)) {
            throw new IllegalStateException("Base value has not been set.");
        }

        // Lazily resolve the AttributeSnapshot constructor once and cache it.
        if (ATTRIBUTE_CONSTRUCTOR == null) {
            ATTRIBUTE_CONSTRUCTOR = FuzzyReflection.fromClass(MinecraftReflection.getAttributeSnapshotClass(), true)
                .getConstructor(FuzzyMethodContract.newBuilder().parameterCount(4)
                    .parameterDerivedOf(MinecraftReflection.getPacketClass(), 0)
                    .parameterExactType(double.class, 2).parameterDerivedOf(Collection.class, 3)
                    .build()
                );

            // Just in case
            ATTRIBUTE_CONSTRUCTOR.setAccessible(true);
        }

        // On newer versions the key is an AttributeBase resolved from the
        // IRegistry.ATTRIBUTE registry; on older ones it is the plain string.
        // Note: this local deliberately shadows the attributeKey field.
        Object attributeKey;

        if (KEY_WRAPPED) {
            // Lazily cache the registry instance and its lookup method.
            if (REGISTRY == null) {
                Class<?> iRegistry = MinecraftReflection.getMinecraftClass("IRegistry");

                try {
                    REGISTRY = iRegistry.getDeclaredField("ATTRIBUTE").get(null);
                } catch (ReflectiveOperationException ex) {
                    throw new RuntimeException("Failed to obtain ATTRIBUTE registry", ex);
                }
            }

            if (REGISTRY_GET == null) {
                Class<?> keyClass = MinecraftReflection.getMinecraftKeyClass();
                REGISTRY_GET = Accessors.getMethodAccessor(REGISTRY.getClass(), "get", keyClass);
            }

            // Apply legacy-name remapping before the registry lookup.
            String strKey = REMAP.getOrDefault(this.attributeKey, this.attributeKey);
            Object key = MinecraftKey.getConverter().getGeneric(new MinecraftKey(strKey));
            attributeKey = REGISTRY_GET.invoke(REGISTRY, key);

            if (attributeKey == null) {
                throw new IllegalArgumentException("Invalid attribute name: " + this.attributeKey);
            }
        } else {
            attributeKey = this.attributeKey;
        }

        try {
            Object handle = ATTRIBUTE_CONSTRUCTOR.newInstance(
                packet.getHandle(),
                attributeKey,
                baseValue,
                getUnwrappedModifiers());

            // Create it
            return new WrappedAttribute(handle);
        } catch (Exception e) {
            throw new RuntimeException("Cannot construct AttributeSnapshot.", e);
        }
    }
}
/**
 * Clears transient UI data in both child fragments, skipping any fragment
 * that has not been created yet.
 */
public void resetData() {
    if (fragmentList != null) {
        fragmentList.clearPeers();
    }
    if (fragmentDetails != null) {
        fragmentDetails.resetViews();
    }
}
public final class _ExtDomApi {
private _ExtDomApi() {
// Not meant to be called
}
static public boolean isXMLNameLike(String name) {
return DomStringUtil.isXMLNameLike(name);
}
static public boolean matchesName(String qname, String nodeName, String nsURI, Environment env) {
return DomStringUtil.matchesName(qname, nodeName, nsURI, env);
}
} | class | java | 186 |
def doCIVETlinking(colname, archive_tag, civet_ext):
for i in range(0,len(checklist)):
target = os.path.join(civet_in, prefix + '_' + checklist['id'][i] + civet_ext)
if os.path.exists(target)==False:
mncdir = os.path.join(inputpath,checklist['id'][i])
if pd.isnull(checklist[colname][i]):
mncfiles = []
for fname in os.listdir(mncdir):
if archive_tag in fname:
mncfiles.append(fname)
if DEBUG: print "Found {} {} in {}".format(len(mncfiles),archive_tag,mncdir)
if len(mncfiles) == 1:
checklist[colname][i] = mncfiles[0]
elif len(mncfiles) > 1 & QCedTranfer:
meanmnc = [m for m in mncfiles if "mean" in m]
if len(meanmnc) == 1:
checklist[colname][i] = meanmnc[0]
else:
checklist['notes'][i] = "> 1 {} found".format(archive_tag)
elif len(mncfiles) > 1 & QCedTranfer==False:
checklist['notes'][i] = "> 1 {} found".format(archive_tag)
elif len(mncfiles) < 1:
checklist['notes'][i] = "No {} found.".format(archive_tag)
if pd.isnull(checklist[colname][i])==False:
mncpath = os.path.join(mncdir,checklist[colname][i])
if DEBUG: print("linking {} to {}".format(mncpath, target))
os.symlink(mncpath, target) | function | python | 187 |
// Reads the data descriptor that can follow a zip entry's compressed data
// (crc32, compressed size, uncompressed size; SUMS_SIZE bytes total) and
// stores the values into this entry. Returns the number of descriptor bytes
// consumed, or 0 if the descriptor could not be read.
//
// A descriptor may optionally be preceded by the signature word SUMS_MAGIC
// ("PK\x07\x08"). If the first word read equals SUMS_MAGIC it is ambiguous:
// it could be the CRC itself or the optional signature. The code peeks up to
// 8 more bytes and checks whether they look like the next header's magic to
// decide which interpretation is right, pushing back any bytes that belong
// to the following record.
size_t wxZipEntry::ReadDescriptor(wxInputStream& stream)
{
    wxZipHeader ds(stream, SUMS_SIZE);
    if (!ds)
        return 0;

    m_Crc = ds.Read32();
    m_CompressedSize = ds.Read32();
    m_Size = ds.Read32();

    // Ambiguous case: the "CRC" we read may actually be the optional
    // descriptor signature, shifting the real fields by one word.
    if (m_Crc == SUMS_MAGIC)
    {
        wxZipHeader buf(stream, 8);
        wxUint32 u1 = buf.GetSize() >= 4 ? buf.Read32() : (wxUint32)LOCAL_MAGIC;
        wxUint32 u2 = buf.GetSize() == 8 ? buf.Read32() : 0;

        // Exactly one header magic in the expected position means the
        // fields were aligned correctly after all; push the peeked bytes
        // back for the next reader.
        if ((u1 == LOCAL_MAGIC || u1 == CENTRAL_MAGIC) &&
            (u2 != LOCAL_MAGIC && u2 != CENTRAL_MAGIC))
        {
            if (buf.GetSize() > 0)
                stream.Ungetch(buf.GetData(), buf.GetSize());
        }
        else
        {
            // The first word was the signature: shift every field down by
            // one word, take the uncompressed size from the peeked data,
            // and push back whatever belongs to the next record.
            if (buf.GetSize() > 4)
                stream.Ungetch(buf.GetData() + 4, buf.GetSize() - 4);
            m_Crc = wx_truncate_cast(wxUint32, m_CompressedSize);
            m_CompressedSize = m_Size;
            m_Size = u1;
            return SUMS_SIZE + 4;
        }
    }

    return SUMS_SIZE;
}
@VisibleForTesting
public static long computeQueryMemory(DrillConfig config, OptionSet optionManager, long directMemory) {
long perQueryMemory = Math.round(directMemory *
optionManager.getOption(ExecConstants.PERCENT_MEMORY_PER_QUERY));
perQueryMemory = Math.max(perQueryMemory,
optionManager.getOption(ExecConstants.MAX_QUERY_MEMORY_PER_NODE));
long maxAllocPerNode = Math.min(directMemory,
config.getLong(RootAllocatorFactory.TOP_LEVEL_MAX_ALLOC));
maxAllocPerNode = Math.min(maxAllocPerNode, perQueryMemory);
return maxAllocPerNode;
} | function | java | 189 |
/// <summary>
/// Returns every member in the table that is a Type.
/// </summary>
public override Type[] GetTypes()
{
    var found = new List<Type>();
    foreach (MemberInfo member in table.Values)
    {
        Type type = member as Type;
        if (type != null)
        {
            found.Add(type);
        }
    }
    return found.ToArray();
}
public class LanguageModel {
/** Programming language represented by this model. */
private final Language language;
/** Backing for collection of single-line comment delimiters. */
private List<String> singleCommentDelimiters = new LinkedList<String>();
/** Backing for collection of paired comment delimiters. */
private Map<String,String> pairedCommentDelimiters = new LinkedHashMap<String,String>();
/** Mapping from recognized source file extensions to programming languages.*/
private static Map<String,Language> languageForExtension;
static {
languageForExtension = new HashMap<String,Language>();
languageForExtension.put("sh", Language.BASH);
languageForExtension.put("c", Language.C);
languageForExtension.put("h", Language.C);
languageForExtension.put("cpp", Language.CPLUSPLUS);
languageForExtension.put("java", Language.JAVA);
languageForExtension.put("m", Language.MATLAB);
languageForExtension.put("py", Language.PYTHON);
languageForExtension.put("r", Language.R);
languageForExtension.put("sas", Language.SAS);
}
/** Utility method for looking up the programming language
* associated with the file extension of the provided file name.
* @param fileName The name of the file from which to infer the language.
* @return The inferred programming language, or
* {@link org.yesworkflow.Language Language}.GENERIC
* if the extension is not recognized.
*/
public static Language languageForFileName(String fileName) {
Language language = null;
int i = fileName.lastIndexOf(".");
if (i != -1) {
String extension = fileName.substring(i+1);
language = languageForExtension.get(extension.toLowerCase());
}
if (language == null) language = Language.GENERIC;
return language;
}
/** Constructor for models of languages not explicitly supported by
* YesWorkflow. Comment delimiter strings can be assigned using
* the {@link #singleDelimiter(String) delimiter()} and
* {@link #delimiterPair(String, String) delimiterPair()} methods.
*/
public LanguageModel() {
this.language = Language.GENERIC;
}
/** Constructor that builds a model for the given language.
* @param language The programming language to model.
*/
public LanguageModel(Language language) {
this.language = language;
assignCommentDelimiters();
}
/** Constructor that builds a model for the language inferred
* from the extension of the provided filename.
* @param fileName The file name from which to infer the programming language.
*/
public LanguageModel(String fileName) {
this(languageForFileName(fileName));
}
/** Provides access to the collection of strings
* that signal the beginning of single-line comments.
* @return The list of single-line comment delimiters.
*/
public List<String> getSingleCommentDelimiters() {
return new ArrayList<String>(singleCommentDelimiters);
}
/** Provides access to the collection of pairs of strings
* used to bracket delimited, possibly multi-line comments.
* @return The list of comment delimiters pairs.
*/
public Map<String,String> getPairedCommentDelimiters() {
return new HashMap<String,String>(pairedCommentDelimiters);
}
/** Provides access to the language modeled by this instance.
* @return The programming language.
*/
public Language getLanguage() {
return language;
}
/** Returns the name of the programming language modeled
* by this instance.
* @return The name of the programming language.
*/
public String toString() {
return language.toString();
}
/** Adds a single-line comment delimiter string to the model.
* @param start A string indicating the start of a one-line comment.
*/
public void singleDelimiter(String start) {
singleCommentDelimiters.add(start);
}
/** Adds a pair of comment-delimiting strings to the model.
* @param start A string indicating the start of a delimited, possibly multi-line comment.
* @param end The corresponding string indicating the end of the comment.
*/
public void delimiterPair(String start, String end) {
pairedCommentDelimiters.put(start, end);
}
/** Enumeration of match conditions returned from comment delimiter matching methods.
* Enables match methods to distinguish between matches to the two kinds of start delimiters,
* and between full and prefix matches.
*/
public enum MatchExtent {
NO_MATCH,
PREFIX_MATCH,
FULL_MATCH_SINGLE,
FULL_MATCH_PAIRED,
FULL_MATCH_SINGLE_PREFIX_MATCH_PAIRED,
FULL_MATCH_PAIRED_PREFIX_MATCH_SINGLE
}
/**
* Determines if the passed string matches any of the comment start delimiters
* defined for the language. Tries to match against the single delimiters
* used to start one-line comments, as well as the start delimiters of
* delimiter pairs used to define partial-line or multi-line comments. The
* return value distinguishes between matches to the two kinds of start delimiters,
* and between full and prefix matches.
*
* @param s The potential comment start delimiter to be tested.
* @return The extent of the match found.
*/
public MatchExtent commentStartMatches(String s) {
int length = s.length();
// look for a match with single-line comment start delimiter
MatchExtent singleMatchExtent = MatchExtent.NO_MATCH;
for (String singleCommentDelimiter : singleCommentDelimiters) {
if (singleCommentDelimiter.startsWith(s)) {
singleMatchExtent = (length == singleCommentDelimiter.length()) ?
MatchExtent.FULL_MATCH_SINGLE : MatchExtent.PREFIX_MATCH;
break;
}
}
// look for a match with partial-line/multi-line comment start delimiters
MatchExtent pairedMatchExtent = MatchExtent.NO_MATCH;
for (String startCommentDelimiter : pairedCommentDelimiters.keySet()) {
if (startCommentDelimiter.startsWith(s)) {
pairedMatchExtent = (length == startCommentDelimiter.length()) ?
MatchExtent.FULL_MATCH_PAIRED : MatchExtent.PREFIX_MATCH;
break;
}
}
switch(singleMatchExtent) {
case PREFIX_MATCH:
switch(pairedMatchExtent) {
case FULL_MATCH_PAIRED:
return MatchExtent.FULL_MATCH_PAIRED_PREFIX_MATCH_SINGLE;
default:
return MatchExtent.PREFIX_MATCH;
}
case FULL_MATCH_SINGLE:
switch(pairedMatchExtent) {
case PREFIX_MATCH:
return MatchExtent.FULL_MATCH_SINGLE_PREFIX_MATCH_PAIRED;
default:
return MatchExtent.FULL_MATCH_SINGLE;
}
default:
return pairedMatchExtent;
}
}
/**
 * Determines if the passed string matches the comment end delimiter
 * corresponding to the provided comment start delimiter. The
 * return value distinguishes between full and prefix matches.
 *
 * @param s The potential comment end delimiter to be tested.
 * @param startDelimiter The comment start delimiter corresponding to the expected
 *        end delimiter.
 * @return The extent of the match found; {@code NO_MATCH} when
 *         {@code startDelimiter} is not a registered paired start delimiter.
 */
public MatchExtent commentEndMatches(String s, String startDelimiter) {
    String endCommentDelimiter = pairedCommentDelimiters.get(startDelimiter);
    // Guard against an unknown start delimiter; the previous code would have
    // thrown a NullPointerException on the startsWith call below.
    if (endCommentDelimiter == null) {
        return MatchExtent.NO_MATCH;
    }
    if (!endCommentDelimiter.startsWith(s)) {
        return MatchExtent.NO_MATCH;
    }
    return (s.length() == endCommentDelimiter.length())
            ? MatchExtent.FULL_MATCH_PAIRED
            : MatchExtent.PREFIX_MATCH;
}
/** Assigns comment delimiter strings to the model according to the
 * language the model represents. Does nothing when no language is set.
 */
private void assignCommentDelimiters() {
    if (language == null) {
        return;
    }
    switch (language) {
        case BASH:
            singleDelimiter("#");
            break;
        // C, C++ and Java share identical comment syntax; the three cases
        // were previously duplicated verbatim.
        case C:
        case CPLUSPLUS:
        case JAVA:
            singleDelimiter("//");
            delimiterPair("/*", "*/");
            break;
        case GENERIC:
            // Generic text has no comment syntax.
            break;
        case MATLAB:
            singleDelimiter("%");
            delimiterPair("%{", "%}");
            delimiterPair("...", "...");
            break;
        case PYTHON:
            singleDelimiter("#");
            delimiterPair("\"\"\"", "\"\"\"");
            delimiterPair("'''", "'''");
            break;
        case R:
            singleDelimiter("#");
            break;
        case SAS:
            delimiterPair("*", ";");
            delimiterPair("/*", "*/");
            break;
    }
}
} | class | java | 191 |
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
    System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(CreditsDialogue));
    this.CreditsDialogueButton = new System.Windows.Forms.Button();
    this.CreditsText = new System.Windows.Forms.RichTextBox();
    this.SuspendLayout();
    // 
    // CreditsDialogueButton
    // 
    this.CreditsDialogueButton.DialogResult = System.Windows.Forms.DialogResult.Cancel;
    this.CreditsDialogueButton.Location = new System.Drawing.Point(162, 252);
    this.CreditsDialogueButton.Margin = new System.Windows.Forms.Padding(6);
    this.CreditsDialogueButton.Name = "CreditsDialogueButton";
    this.CreditsDialogueButton.Size = new System.Drawing.Size(150, 45);
    this.CreditsDialogueButton.TabIndex = 3;
    this.CreditsDialogueButton.Text = "Return";
    this.CreditsDialogueButton.UseVisualStyleBackColor = true;
    this.CreditsDialogueButton.Click += new System.EventHandler(this.CreditsDialogueButton_Click);
    // 
    // CreditsText
    // 
    this.CreditsText.BackColor = System.Drawing.SystemColors.Menu;
    this.CreditsText.Cursor = System.Windows.Forms.Cursors.Arrow;
    this.CreditsText.Font = new System.Drawing.Font("Microsoft Sans Serif", 10.15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(238)));
    this.CreditsText.Location = new System.Drawing.Point(15, 15);
    this.CreditsText.Margin = new System.Windows.Forms.Padding(6);
    this.CreditsText.Name = "CreditsText";
    this.CreditsText.ReadOnly = true;
    this.CreditsText.Size = new System.Drawing.Size(445, 225);
    this.CreditsText.TabIndex = 4;
    this.CreditsText.Text = resources.GetString("CreditsText.Text");
    // 
    // CreditsDialogue
    // 
    this.AutoScaleDimensions = new System.Drawing.SizeF(16F, 31F);
    this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
    this.CancelButton = this.CreditsDialogueButton;
    this.CausesValidation = false;
    this.ClientSize = new System.Drawing.Size(475, 308);
    this.ControlBox = false;
    this.Controls.Add(this.CreditsText);
    this.Controls.Add(this.CreditsDialogueButton);
    this.Font = new System.Drawing.Font("Microsoft Sans Serif", 16.2F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(238)));
    this.Margin = new System.Windows.Forms.Padding(6);
    this.Name = "CreditsDialogue";
    this.ShowInTaskbar = false;
    this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
    this.Text = "Credits";
    this.TopMost = true;
    this.ResumeLayout(false);
}
// Runs for the requested number of decision cycles, logging the request
// through the CLI message channel first.
//
// NOTE(review): thisAgent is not used in this body; DoRun appears to act on
// the CLI's own agent — confirm before removing the parameter.
void CommandLineInterface::Run_DC(agent* thisAgent, int run_count)
{
    std::ostringstream tempString;
    tempString << "MemCon| Running for " << run_count << " decision cycles.\n";
    PrintCLIMessage(&tempString);

    // No run option flags set; use the default interleave granularity.
    // (An unused `cli::Options opt;` local was removed.)
    cli::Cli::RunBitset options(0);
    DoRun(options, run_count, cli::Cli::RUN_INTERLEAVE_DEFAULT);
}
/// <summary>
/// Populates this packet's fields by reading them from the raw buffer in
/// wire order.
/// </summary>
/// <param name="bytes">The raw packet bytes to decode.</param>
public override void Decode(byte[] bytes)
{
    PacketReader packetReader = new PacketReader(bytes);

    // Header: total length followed by the packet type code.
    Length = packetReader.ReadUInt16();
    Type = (PacketType) packetReader.ReadUInt16();

    // Fixed-length (16) string fields.
    Username = packetReader.ReadString(16);
    CharacterName = packetReader.ReadString(16);
    MaskedPassword = packetReader.ReadString(16);

    // Character appearance/class and the session token.
    Mesh = packetReader.ReadUInt16();
    Class = packetReader.ReadUInt16();
    Token = packetReader.ReadUInt32();
}
/// <summary>
/// Transfers the outcome of a completed outer task onto the given
/// completion source. Cancellation and faults are copied across directly;
/// a successfully completed outer task is unwrapped and its inner task is
/// handed off to <c>ProcessInnerTask</c>.
/// </summary>
private static void ProcessCompletedOuterTask(TaskCompletionSource<TResult> tcs, Task task)
{
    Contract.Assert(tcs != null);
    Contract.Assert(task != null && task.IsCompleted);

    TaskStatus status = task.Status;
    if (status == TaskStatus.Canceled || status == TaskStatus.Faulted)
    {
        // Propagate cancellation/fault straight onto the completion source.
        var transferred = TrySetFromTask(tcs, task);
        Contract.Assert(transferred, "Expected TrySetFromTask from outer task to succeed");
    }
    else if (status == TaskStatus.RanToCompletion)
    {
        // The outer task wraps either Task<Task<TResult>> or Task<Task>;
        // unwrap whichever it actually is.
        var typedOuter = task as Task<Task<TResult>>;
        ProcessInnerTask(tcs, typedOuter != null ?
            typedOuter.Result : ((Task<Task>)task).Result);
    }
}
def lag(self, shift, time=None, index=None, metrics=None, yvars=None, silent=True):
    """Pair each observation with its metrics lagged by ``shift`` time steps.

    Builds a frame whose columns carry a two-level MultiIndex
    ``(lag, metric)``: lag ``0`` holds current values (plus any side
    columns), lag ``shift`` holds the shifted values. The result is cached
    in ``self.lagged[shift]``.

    shift   : number of steps passed to ``DataFrame.shift`` (presumably a
              positive int so lagged values come from earlier rows — TODO
              confirm sign convention with callers).
    time    : time-level column names; defaults to ``self.time``.
    index   : entity index column names; defaults to ``self.index``.
    metrics : metric column names; defaults to ``self.metrics``, or, if still
              None, to every column that is neither time nor index.
    yvars   : when given, lag-0 columns are restricted to these metrics
              (all lagged columns are kept).
    silent  : when False, a copy of the lagged frame is also returned.
    """
    index = self.index if index is None else index
    metrics = self.metrics if metrics is None else metrics
    time = self.time if time is None else time
    df = self.data_.copy()
    # Fall back to "everything that isn't time or index" as the metric set.
    metrics = df.drop(time + index, axis=1).columns.values.tolist() if metrics is None else metrics
    # Columns that are neither index nor metric ride along unchanged ("reserve").
    side_columns = [x for x in df.columns if x not in (index + metrics)]
    df = df.set_index(index)
    reserve = df[side_columns]
    df = df.drop(side_columns, axis=1)
    # Long format: one row per (index..., metric) combination.
    df = df.stack()
    # Name the freshly stacked level 'metric' unless the columns already had a name.
    metric_name = 'metric' if None in df.index.names else [x for x in df.index.names if x not in index][0]
    df.index.names = [metric_name if x is None else x for x in df.index.names]
    # Pivot so rows are the time levels; shifting rows then lags every series.
    df = df.unstack([x for x in df.index.names if x not in time])
    df_shift = df.shift(shift).copy()
    # Concatenate current (key 0) and shifted (key `shift`) under a new 'lag' level.
    df = pd.concat([df, df_shift], keys=[0, shift], names=['lag'] + time)
    df.columns.names = [metric_name] if df.columns.names == [None] else df.columns.names
    # Move (lag, metric) into the columns, restoring one row per observation.
    df = df.T.stack(df.index.names).unstack(['lag', metric_name])
    if type(df.columns) == pd.MultiIndex:
        # Put 'lag' outermost and sort columns for deterministic layout.
        df.columns = df.columns.reorder_levels(['lag', metric_name])
        df = df.T.sort_index().T
    if type(df.index) == pd.MultiIndex:
        # Align row-level order with `reserve` so the concat below lines up.
        df.index = df.index.reorder_levels(reserve.index.names)
        reserve.index = reserve.index.reorder_levels(reserve.index.names)
    # Side columns are labelled as lag 0 so they fit the (lag, metric) scheme.
    reserve.columns = [(0, x) for x in reserve.columns]
    dropna_cols = df.columns.values
    if min(reserve.shape) > 0:
        df = pd.concat([df, reserve], axis=1)
    # Drop rows made incomplete by the shift (NaNs only checked in metric columns).
    df = df.dropna(subset=dropna_cols)
    df.columns = pd.MultiIndex.from_tuples(df.columns, names=['lag', metric_name])
    if yvars is not None:
        # Keep only requested targets at lag 0; every lagged column survives.
        drop_cols = [(0, x) for x in metrics if x not in yvars]
        keep_cols = [x for x in df.columns.values if x not in drop_cols]
        df = df[keep_cols]
    self.lagged[shift] = df.copy()
    if not silent:
        return df.copy()
def DSpikeSolve(Sample, Spike, Standard, Mass, RatioMass, Anneal=False):
    """Solve the double-spike inversion for one sample.

    Finds (alpha, beta, lamb) satisfying, for every isotope ratio i,

        Standard[i] * (Mass[i]/RatioMass)**(-alpha) * lamb
            + Spike[i] * (1 - lamb)
            == Sample[i] * (Mass[i]/RatioMass)**(-beta)

    A linearised 3x3 system provides the starting guess for ``fsolve``.

    Parameters
    ----------
    Sample, Spike, Standard : sequence of float
        Measured sample, spike and standard isotope ratios (three ratios
        are required so the linearised system is square).
    Mass : sequence of float
        Numerator isotope masses, one per ratio.
    RatioMass : float
        Mass of the reference (denominator) isotope.
    Anneal : bool, optional
        Unused; retained for interface compatibility.

    Returns
    -------
    (alpha, beta, lamb, success)
        The solution and ``True`` on convergence, or the string
        ``"No Convergence"`` three times and ``False`` on failure.
    """
    n = len(Spike)
    log_ratio = [math.log(Mass[i] / RatioMass) for i in range(n)]

    # Linearised system A @ x = b used only to seed the nonlinear solver.
    A = np.array([[Spike[i] - Standard[i],
                   -Standard[i] * log_ratio[i],
                   Sample[i] * log_ratio[i]] for i in range(n)])
    b = np.array([Sample[i] - Standard[i] for i in range(n)])
    x = np.linalg.solve(A, b)
    # Map the linear solution onto the (alpha, beta, lambda) parameterisation.
    p = [x[1] / (1 - x[0]), x[2], 1 - x[0]]

    def equation(v):
        # Residuals of the exponential-law mixing equations; v = (alpha, beta, lamb).
        return np.array([Standard[i] * (Mass[i] / RatioMass) ** (-v[0]) * v[2] +
                         Spike[i] * (1 - v[2]) -
                         Sample[i] * (Mass[i] / RatioMass) ** (-v[1])
                         for i in range(n)])

    # Promote RuntimeWarnings to errors so a diverging fsolve is detected,
    # but scope the filter change so global warning state is not left mutated
    # (the previous code called warnings.simplefilter unconditionally and
    # also left debug prints in the success path).
    with warnings.catch_warnings():
        warnings.simplefilter("error", RuntimeWarning)
        try:
            alpha, beta, lamb = fsolve(equation, x0=p, xtol=1e-10,
                                       maxfev=10000, factor=0.5)
            return alpha, beta, lamb, True
        except RuntimeWarning:
            # Solver diverged; signal failure to the caller.
            return "No Convergence", "No Convergence", "No Convergence", False
// Executes the RemoveArgs analysis/transformation pipeline and aggregates
// its counters into a single PassStats record.
RemoveArgs::PassStats RemoveArgs::run(ConfigFiles& config) {
  RemoveArgs::PassStats stats;
  // Determine which method results are actually consumed by callers.
  gather_results_used();
  // Build the method-override graph consumed by the proto-update steps.
  auto graph = mog::build_graph(m_scope);
  compute_reordered_protos(*graph);
  auto method_stats =
      update_method_protos(*graph, config.get_do_not_devirt_anon());
  // Callsites are rewritten only after the method prototypes have changed.
  stats.callsite_args_removed_count = update_callsites();
  // Copy the per-method counters into the pass-level record.
  stats.method_params_removed_count = method_stats.method_params_removed_count;
  stats.method_results_removed_count =
      method_stats.method_results_removed_count;
  stats.method_protos_reordered_count =
      method_stats.method_protos_reordered_count;
  stats.methods_updated_count = method_stats.methods_updated_count;
  stats.local_dce_stats = method_stats.local_dce_stats;
  return stats;
}
def unique_edge_list(network):
    """Return every undirected edge of ``network`` exactly once.

    Each edge is a ``(node_id, adjacent_id)`` tuple oriented the way it was
    first encountered; the reversed duplicate produced by the opposite
    endpoint's adjacency list is suppressed.

    Parameters
    ----------
    network : object
        Must expose ``nodes()`` (iterable of node ids) and ``network_dict``
        mapping each id to a node with ``node_id`` and ``get_adjacents()``.

    Returns
    -------
    list of tuple
        Unique edges in first-seen order.
    """
    edge_list = []
    # Track edges orientation-independently: frozenset({a, b}) equals
    # frozenset({b, a}), replacing the original O(E) list scans per edge
    # with O(1) set lookups while preserving output order exactly.
    seen = set()
    for node_key in network.nodes():
        node = network.network_dict[node_key]
        for adjacent in node.get_adjacents():
            edge_key = frozenset((node.node_id, adjacent))
            if edge_key not in seen:
                seen.add(edge_key)
                edge_list.append((node.node_id, adjacent))
    return edge_list
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.