code
stringlengths
0
30.8k
source
stringclasses
6 values
language
stringclasses
9 values
__index_level_0__
int64
0
100k
func (nt *NamespacesTester) EnsureDumpAllForNamespace( md namespace.Metadata, ) (DecodedBlockMap, error) { id := md.ID().String() for _, acc := range nt.Accumulators { if acc.ns != id { continue } writeMap := acc.writeMap loadedBlockMap := acc.dumpLoadedBlocks() merged := make(DecodedBlockMap, len(writeMap)+len(loadedBlockMap)) for k, v := range writeMap { merged[k] = v } for k, v := range loadedBlockMap { if vals, found := merged[k]; found { merged[k] = append(vals, v...) } else { merged[k] = v } } return merged, nil } return nil, fmt.Errorf("namespace with id %s not found "+ "valid namespaces are %v", id, nt.Namespaces) }
function
go
99,800
// Initializes the GL environment: binds extension entry points for the current
// platform and probes the context version to set feature_level_. The window
// size/title/mode parameters are unused here (names intentionally omitted).
// Always returns true.
bool Environment::Initialize(const vec2i& , const char* , WindowMode ) {
#if defined(_WIN32) && !defined(FPLBASE_GLES)
  // Desktop Windows GL: resolve every base and extension function pointer
  // through wglGetProcAddress via the GLEXT lookup macro.
#define GLEXT(type, name, required) \
  LOOKUP_GL_FUNCTION(type, name, required, wglGetProcAddress)
  GLBASEEXTS GLEXTS
#undef GLEXT
#endif
#ifndef PLATFORM_MOBILE
#ifdef GL_MAJOR_VERSION
  // Desktop: query the context's major version. Only trust the result when the
  // query itself raised no GL error (older contexts reject GL_MAJOR_VERSION).
  GLint version = 0;
  glGetIntegerv(GL_MAJOR_VERSION, &version);
  if (glGetError() == 0) {
    if (version >= 3) {
      feature_level_ = kFeatureLevel30;
    }
  }
#endif
#endif
#ifdef __ANDROID__
  // Android: an ES 3 context additionally needs its GL3 entry points loaded.
  const int version = AndroidGetContextClientVersion();
  if (version >= 3) {
    feature_level_ = kFeatureLevel30;
    AndroidInitGl3Functions();
  }
#ifdef FPLBASE_GLES
  // GLES extensions are resolved through eglGetProcAddress on Android.
#define GLEXT(type, name, required) \
  LOOKUP_GL_FUNCTION(type, name, required, eglGetProcAddress)
  GLESEXTS
#undef GLEXT
#endif
#endif
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
  // iOS: the context version is fixed at creation time; ES < 2 is unsupported.
  const int version = IosGetContextClientVersion();
  assert(version >= 2);
  if (version >= 3) {
    feature_level_ = kFeatureLevel30;
  }
#endif
  return true;
}
function
c++
99,801
/**
 * Builds the namespace and attribute event collections for a StartElement.
 * <p>
 * Namespaces come from two places: the current namespace context (when one is
 * set) and any {@code xmlns}/{@code xmlns:*} attributes on the element itself.
 * All other attributes are converted to {@code Attribute} events.
 *
 * @param attributes the SAX attributes of the element
 * @param events out-parameter: {@code events[0]} receives the namespace
 *        collection and {@code events[1]} the attribute collection; either may
 *        be {@link Collections#EMPTY_LIST} when none were found
 */
protected void createStartEvents(Attributes attributes, Collection[] events) {
    Map nsMap = null;
    List attrs = null;
    // Collect namespaces declared in the current namespace context, if any.
    if (namespaces != null) {
        Iterator prefixes = namespaces.getDeclaredPrefixes();
        while (prefixes.hasNext()) {
            String prefix = (String) prefixes.next();
            String uri = namespaces.getNamespaceURI(prefix);
            Namespace ns = createNamespace(prefix, uri);
            if (nsMap == null) {
                nsMap = new HashMap();
            }
            nsMap.put(prefix, ns);
        }
    }
    String[] qname = { null, null };
    for (int i = 0, s = attributes.getLength(); i < s; i++) {
        parseQName(attributes.getQName(i), qname);
        String attrPrefix = qname[0];
        String attrLocal = qname[1];
        String attrQName = attributes.getQName(i);
        String attrValue = attributes.getValue(i);
        String attrURI = attributes.getURI(i);
        if ("xmlns".equals(attrQName) || "xmlns".equals(attrPrefix)) {
            // FIX: guard against nsMap being null before calling containsKey.
            // Previously this threw a NullPointerException whenever an xmlns
            // attribute was seen while no namespace context was set.
            if (nsMap == null || !nsMap.containsKey(attrPrefix)) {
                Namespace ns = createNamespace(attrPrefix, attrValue);
                if (nsMap == null) {
                    nsMap = new HashMap();
                }
                nsMap.put(attrPrefix, ns);
            }
        } else {
            Attribute attribute;
            if (attrPrefix.length() > 0) {
                attribute = eventFactory.createAttribute(attrPrefix, attrURI,
                        attrLocal, attrValue);
            } else {
                attribute = eventFactory.createAttribute(attrLocal, attrValue);
            }
            if (attrs == null) {
                attrs = new ArrayList();
            }
            attrs.add(attribute);
        }
    }
    events[0] = (nsMap == null ? Collections.EMPTY_LIST : nsMap.values());
    events[1] = (attrs == null ? Collections.EMPTY_LIST : attrs);
}
function
java
99,802
/// <summary>
/// Resolves which localization folder should be loaded for the given culture.
/// Tries the culture's full name, then its two-letter ISO code, then repeats
/// both checks for the configured default locale before giving up.
/// </summary>
/// <param name="cultureInfo">Culture requested by the caller.</param>
/// <returns>The resolved folder path (also stored in <c>pathToLoad</c>).</returns>
/// <exception cref="LanguageFolderException">No matching folder exists.</exception>
private string SetPathToLoad(CultureInfo cultureInfo)
{
    CultureInfo fallback = CultureInfo.GetCultureInfo(DefaultLocal);

    if (Directory.Exists(Path.Combine(LocalizationPath, cultureInfo.Name)))
        pathToLoad = SelectLanguageFolder(cultureInfo, true);
    else if (Directory.Exists(Path.Combine(LocalizationPath, cultureInfo.TwoLetterISOLanguageName)))
        pathToLoad = SelectLanguageFolder(cultureInfo, false);
    else if (Directory.Exists(Path.Combine(LocalizationPath, fallback.Name)))
        pathToLoad = SelectLanguageFolder(fallback, true);
    else if (Directory.Exists(Path.Combine(LocalizationPath, fallback.TwoLetterISOLanguageName)))
        pathToLoad = SelectLanguageFolder(fallback, false);
    else
        throw new LanguageFolderException(CultureInfo.CurrentCulture, fallback);

    return pathToLoad;
}
function
c#
99,803
// Verifies that a task birth recorded while tracking is PROFILING_ACTIVE still
// appears in a snapshot even though tracking is DEACTIVATED before the task's
// run is tallied (the run duration is therefore not recorded: expected
// death data is 1 count with times 2/4 from the posted/start/end constants).
TEST_F(TrackedObjectsTest, LifeCycleMidDeactivatedToSnapshotMainThread) {
  ThreadData::InitializeAndSetTrackingStatus(ThreadData::PROFILING_ACTIVE);

  const char kFunction[] = "LifeCycleMidDeactivatedToSnapshotMainThread";
  Location location(kFunction, kFile, kLineNumber, NULL);
  TallyABirth(location, kMainThreadName);

  const TrackedTime kTimePosted = TrackedTime::FromMilliseconds(1);
  const base::TimeTicks kDelayedStartTime = base::TimeTicks();
  base::TrackingInfo pending_task(location, kDelayedStartTime);
  pending_task.time_posted = kTimePosted;

  // Turn off tracking now; the birth above was already recorded.
  ThreadData::InitializeAndSetTrackingStatus(ThreadData::DEACTIVATED);

  const unsigned int kStartOfRun = 5;
  const unsigned int kEndOfRun = 7;
  SetTestTime(kStartOfRun);
  TaskStopwatch stopwatch;
  stopwatch.Start();
  SetTestTime(kEndOfRun);
  stopwatch.Stop();

  ThreadData::TallyRunOnNamedThreadIfTracking(pending_task, stopwatch);

  ProcessDataSnapshot process_data;
  ThreadData::Snapshot(0, &process_data);
  ExpectSimpleProcessData(process_data, kFunction, kMainThreadName,
                          kMainThreadName, 1, 2, 4);
}
function
c++
99,804
/**
 * Processes a DIGEST-MD5 server challenge and produces the client response.
 * <p>
 * The SASL exchange is a small state machine driven by {@code step}:
 * step 2 handles the server's initial digest-challenge and returns the
 * client's digest-response; step 3 validates the server's rspauth and
 * installs the negotiated security layer, returning {@code null}.
 *
 * @param challengeData raw challenge bytes from the server
 * @return the client response bytes, or {@code null} after the final step
 * @throws SaslException on an oversized challenge, a parse/validation
 *         failure, or when called in an unexpected state
 */
public byte[] evaluateChallenge(byte[] challengeData) throws SaslException {
    if (challengeData.length > MAX_CHALLENGE_LENGTH) {
        throw new SaslException(
            "DIGEST-MD5: Invalid digest-challenge length. Got: " +
            challengeData.length + " Expected < " + MAX_CHALLENGE_LENGTH);
    }

    /* Extract and process digest-challenge */
    byte[][] challengeVal;

    switch (step) {
    case 2:
        /* Process server's first challenge (from Step 1) */
        /* Get realm, qop, maxbuf, charset, algorithm, cipher, nonce
           directives */
        List<byte[]> realmChoices = new ArrayList<byte[]>(3);
        challengeVal = parseDirectives(challengeData, DIRECTIVE_KEY,
            realmChoices, REALM);

        try {
            processChallenge(challengeVal, realmChoices);
            checkQopSupport(challengeVal[QOP], challengeVal[CIPHER]);
            ++step;  // advance to step 3 only on success
            return generateClientResponse(challengeVal[CHARSET]);
        } catch (SaslException e) {
            // Reset the state machine and scrub the password on any failure.
            step = 0;
            clearPassword();
            throw e;
        } catch (IOException e) {
            step = 0;
            clearPassword();
            throw new SaslException("DIGEST-MD5: Error generating " +
                "digest response-value", e);
        }

    case 3:
        try {
            /* Process server's step 3 (server response to digest response) */
            /* Get rspauth directive */
            challengeVal = parseDirectives(challengeData, DIRECTIVE_KEY,
                null, REALM);
            validateResponseValue(challengeVal[RESPONSE_AUTH]);

            /* Initialize SecurityCtx implementation; privacy implies
               integrity, so check it first. */
            if (integrity && privacy) {
                secCtx = new DigestPrivacy(true /* client */);
            } else if (integrity) {
                secCtx = new DigestIntegrity(true /* client */);
            }

            return null;
        } finally {
            // Whether validation succeeded or not, the exchange is over.
            clearPassword();
            step = 0;
            completed = true;
        }

    default:
        throw new SaslException("DIGEST-MD5: Client at illegal state");
    }
}
function
java
99,805
// Gathers the shares this node must expose in answer to complaints made
// against it during the DKG, and returns them in serialised form.
// Also finalises the complaints round and seeds the answers manager.
BeaconSetupService::SerialisedMsg BeaconSetupService::GetComplaintAnswers()
{
  std::lock_guard<std::mutex> lock(mutex_);

  complaints_manager_.Finish(valid_dkg_members_);
  complaint_answers_manager_.Init(complaints_manager_.Complaints());

  // For every member that complained about us, expose the shares we sent them.
  SharesExposedMap answers;
  for (auto const &reporter : complaints_manager_.ComplaintsAgainstSelf())
  {
    answers.insert({reporter, beacon_->GetOwnShares(reporter)});
  }

  return serialisers::Serialise(answers);
}
function
c++
99,806
def inflate(self, shift=None, volume=None):
    """Return a copy of this mesh displaced along its vertex normals.

    Exactly one of ``shift`` or ``volume`` must be given. With ``shift``,
    every vertex moves that distance along its (point) normal. With
    ``volume``, the shift is solved numerically so the inflated mesh's
    volume exceeds the current volume by ``volume``.

    Raises:
        ValueError: if both or neither of ``shift``/``volume`` are given.
    """
    if shift is not None and volume is not None:
        # FIX: message previously read "Only shift of volume ...".
        raise ValueError("Only shift or volume should be given to inflate")
    if shift is None and volume is None:
        raise ValueError("Either shift or volume should be specified to inflate")
    if shift is None:
        # Solve for the shift whose inflated volume exceeds the current
        # volume by exactly `volume` (least-squares on the residual).
        ref_vol = self.volume()
        shift = optimize.minimize_scalar(
            lambda test_shift:
                (self.inflate(shift=test_shift).volume() - ref_vol - volume) ** 2
        ).x
    return Mesh2D(self.vertices + self.normal(atpoint=True) * shift,
                  self.faces, flip_normal=self.flip_normal)
function
python
99,807
/**
 * Handles one raw message received from the authentication module.
 * <p>
 * Auth messages are matched to their pending request by request id; the
 * waiting thread parked on the {@code RequestResult} is then notified.
 * Non-auth messages and unparseable payloads are only logged.
 *
 * @param data raw message bytes from the module
 */
private void readResponse(byte[] data) {
    try {
        final ParsedMessage parsed = this.parser.parseMsg(data);
        if (!(parsed instanceof AuthParsedMessage)) {
            logger.warn("Received SESSION CLOSE message.");
            return;
        }
        final int requestId = ((AuthParsedMessage) parsed).getRequestId();
        final RequestResult res = results.get(requestId);
        if (res == null) {
            logger.warn("Received message without callback mapping: " + requestId);
            return;
        }
        logger.debug("Received message from authentication module: "
                + parsed.getClass().getName());
        res.setResult(parsed);
        // Wake up the thread blocked waiting for this result.
        synchronized (res) {
            res.notify();
        }
    } catch (ParsingException e) {
        logger.error("Could not parse incoming AUTH message: " + e.getMessage());
    }
}
function
java
99,808
/// Applies `f` to the single attribute named `name` on the derive input.
///
/// Returns `None` when the attribute is absent, and panics when it appears
/// more than once.
fn map_unique_attr<T, F>(ast: &syn::DeriveInput, name: &str, f: F) -> Option<T>
where
    F: Fn(syn::Meta) -> T,
{
    let mut mapped = ast
        .attrs
        .iter()
        .filter_map(syn::Attribute::interpret_meta)
        .filter(|meta| meta.name() == name)
        .map(f);

    let first = mapped.next();
    if mapped.next().is_some() {
        panic!("Multiple {} attributes specified.", name);
    }
    first
}
function
rust
99,809
/// <summary>
/// Creates a new "set" trigger action syntax node from its constituent parts.
/// </summary>
/// <param name="setKeyword">The action's "set" keyword token.</param>
/// <param name="propertyName">The name of the property being set.</param>
/// <param name="selector">The optional parenthesized selector.</param>
/// <param name="value">The braced value to assign.</param>
/// <returns>The constructed <see cref="UvssSetTriggerActionSyntax"/> node.</returns>
public static UvssSetTriggerActionSyntax SetTriggerAction(
    SyntaxToken setKeyword,
    UvssPropertyNameSyntax propertyName,
    UvssSelectorWithParenthesesSyntax selector,
    UvssPropertyValueWithBracesSyntax value)
{
    return new UvssSetTriggerActionSyntax(setKeyword, propertyName, selector, value);
}
function
c#
99,810
/*
 * Splits `sz` bytes (plus a new header) off the tail of `blk`, returning a
 * pointer to the new block's header, or NULL when `blk` is too small.
 *
 * FIX: the original computed the new address with pointer arithmetic on a
 * Block* (`blk + (blk->size + sizeof(blk))`), which (a) scales the offset by
 * sizeof(Block) and (b) uses the size of the *pointer* instead of the struct.
 * The address must be computed in bytes.
 */
Block* split_block(Block* blk, unsigned int sz) {
    /* Not enough room for the requested size plus a new header. */
    if (blk->size < sz + sizeof(Block)) {
        return NULL;
    }
    /* Shrink this block; its tail becomes the new block. */
    blk->size = blk->size - (sz + sizeof(Block));
    if (blk->next) {
        blk->next->prev_size = blk->size;
    }
    /* New header sits immediately after blk's (reduced) payload.
       NOTE(review): assumes `size` counts payload bytes excluding the
       header -- confirm against the allocator's Block layout. */
    return (Block*)((char*)blk + sizeof(Block) + blk->size);
}
function
c
99,811
/**
 * Serializes object graphs to and from AMQP wire format.
 * <p>
 * A process-wide singleton backs the static {@link #serialize} and
 * {@link #deserialize} helpers; the actual encoding is delegated to a
 * {@code CustomType} instance.
 */
public class AmqpSerializer {

    /** Singleton instance used by the static helpers. */
    private static final AmqpSerializer instance;

    /** Delegate that performs the actual encode/decode work. */
    private final CustomType customType = new CustomType();

    static {
        instance = new AmqpSerializer();
    }

    /**
     * Encodes an object graph into bytes.
     * @param buffer Buffer to save the bytes. The buffer's position is
     * advanced after bytes are written.
     * @param graph Object to be encoded.
     * @throws AmqpIoException
     */
    public static void serialize(ByteBuffer buffer, Object graph) throws AmqpIoException {
        instance.writeObject(buffer, graph);
    }

    /**
     * Decodes an object of the specified type from the buffer.
     * @param <T> Expected type of the object. The buffer's position is
     * advanced after bytes are read.
     * @param c Class of the type.
     * @param buffer Buffer to read bytes.
     * @return Object of type T.
     * @throws AmqpIoException
     */
    public static <T> T deserialize(Class<T> c, ByteBuffer buffer) throws AmqpIoException {
        return (T)instance.readObject(c, buffer);
    }

    /**
     * Encodes an object graph into bytes.
     * @param buffer Buffer to save the bytes. The buffer's position is
     * advanced after bytes are written.
     * @param graph Object to be encoded.
     * @throws AmqpIoException
     */
    public void writeObject(ByteBuffer buffer, Object graph) throws AmqpIoException {
        try {
            // The HashSet tracks objects already visited on this graph walk.
            this.customType.writeObject(buffer, graph, new HashSet());
        } catch (AmqpIoException ioe) {
            // Re-throw our own exception type untouched.
            throw ioe;
        } catch (Exception e) {
            throw new AmqpIoException(e.getMessage(), e);
        }
    }

    /**
     * Decodes an object of the specified type from the buffer.
     * @param <T> Expected type of the object. The buffer's position is
     * advanced after bytes are read.
     * @param c Class of the type.
     * @param buffer Buffer to read bytes.
     * @return Object of type T.
     * @throws AmqpIoException
     */
    public <T> T readObject(Class c, ByteBuffer buffer) throws AmqpIoException {
        try {
            return (T)this.customType.readObject(c, buffer);
        } catch (AmqpIoException ioe) {
            throw ioe;
        } catch (Exception e) {
            throw new AmqpIoException(e.getMessage(), e);
        }
    }
}
class
java
99,812
/**
 * Shows a modal color-chooser dialog and returns the color selected when the
 * dialog is dismissed.
 *
 * @param component the parent component for the dialog
 * @param title the dialog title
 * @param initial the initially selected color
 * @return the color held by the chooser when the dialog closes
 */
public static Color showDialog(Component component, String title, Color initial) {
    JColorChooser choose = new JColorChooser(initial);
    JDialog dialog = createDialog(component, title, true, choose, null, null);
    dialog.getContentPane().add(choose);
    dialog.pack();
    // FIX: Window.show() has been deprecated since Java 1.5; setVisible(true)
    // is the supported equivalent and still blocks here because the dialog is
    // modal.
    dialog.setVisible(true);
    return choose.getColor();
}
function
java
99,813
/**
 * Immutable-after-construction record of a VIP customer: name, email address,
 * and credit limit. Telescoping constructors supply defaults when fields are
 * omitted.
 */
public class VipCustomer {

    // Customer display name.
    private String name;
    // Customer contact email.
    private String emailAddress;
    // Maximum credit extended to this customer.
    private double creditLimit;

    /** Creates a customer with placeholder name/email and a 1000.00 limit. */
    public VipCustomer() {
        this("default name", "[email protected]", 1000.00);
    }

    /** Creates a customer with the given name/email and a 3000.00 limit. */
    public VipCustomer(String name, String emailAddress) {
        this(name, emailAddress, 3000.00);
    }

    /** Creates a fully specified customer. */
    public VipCustomer(String name, String emailAddress, double creditLimit) {
        this.name = name;
        this.emailAddress = emailAddress;
        this.creditLimit = creditLimit;
    }

    /** @return the customer's name */
    public String getName() {
        return name;
    }

    /** @return the customer's email address */
    public String getEmailAddress() {
        return emailAddress;
    }

    /** @return the customer's credit limit */
    public double getCreditLimit() {
        return creditLimit;
    }
}
class
java
99,814
/**
 * Returns the capacity of the given level (1-7); levels outside that range
 * have capacity 0. Capacities are symmetric: 2, 8, 18, 32, 32, 18, 8.
 *
 * @param lev 1-based level number
 * @return the level's capacity, or 0 for an unknown level
 */
protected static int levelCapacity(int lev) {
    final int[] capacities = {2, 8, 18, 32, 32, 18, 8};
    return (lev >= 1 && lev <= capacities.length) ? capacities[lev - 1] : 0;
}
function
java
99,815
def select(self, id, start, stop, rate=False, maxlen=float("inf"), fixed=0):
    """Select points for `id` in [start, stop) from the coarsest model that
    covers `start` with sufficient resolution.

    `maxlen` caps the number of returned points by imposing a minimum step;
    `rate=True` returns successive differences instead of raw points;
    `fixed=N` re-buckets the points into exactly N equal intervals.

    NOTE(review): assumes models are ordered finest-first so the loop picks
    the first adequate one -- confirm against the container's ordering.
    """
    # Smallest model step that still keeps the result under maxlen points.
    minstep = total_seconds(stop - start) / maxlen
    for index, model in enumerate(self):
        if start >= model.start(id) and model.step >= minstep:
            break
    points = model.select(id, dt__gte=start, dt__lt=stop)
    # Only the finest model (index 0) needs on-the-fly reduction.
    points = list(points if index else model.reduce(points))
    if rate:
        # Convert cumulative values into per-interval deltas.
        points = map(operator.sub, points[1:], points[:-1])
    if fixed:
        # Re-bucket into exactly `fixed` equal-width intervals; Point
        # addition presumably accumulates value/weight -- verify Point.__add__.
        step = (stop - start) / fixed
        intervals = [Point(start + step * index, 0.0, 0) for index in range(fixed)]
        for point in points:
            intervals[int(total_seconds(point.dt - start) / total_seconds(step))] += point
        points = intervals
    return points
function
python
99,816
@Override public void run() { if (shouldSkipCheck()) { skippedHealthChecks++; LOG.info("Detected long delay in scheduling HB processing thread. " + "Skipping heartbeat checks for one iteration."); } else { checkNodesHealth(); } This time taken to work can skew the heartbeat processor thread. The reason why we don't care is because of the following reasons. 1. checkerInterval is general many magnitudes faster than datanode HB frequency. 2. if we have too much nodes, the SCM would be doing only HB processing, this could lead to SCM's CPU starvation. With this approach we always guarantee that HB thread sleeps for a little while. 3. It is possible that we will never finish processing the HB's in the thread. But that means we have a mis-configured system. We will warn the users by logging that information. 4. And the most important reason, heartbeats are not blocked even if this thread does not run, they will go into the processing queue. scheduleNextHealthCheck(); }
function
java
99,817
def well_to_index(plate, row, column, across_row_first=True,
                  element_coordinates=(None, None)):
    """Convert a (plate, row, column) well position to a 1-based index.

    Plates are 96-well (8 rows 'a'-'h' x 12 columns); plates 1-4 are laid out
    consecutively, so plate 2 starts at index 97. ``across_row_first`` selects
    row-major vs column-major numbering within a plate.

    ``element_coordinates`` is only used to identify the offending spreadsheet
    cell in the error message. Invalid values print an error to stderr and
    exit the process (original behavior, preserved).

    FIX: the default for ``element_coordinates`` was a mutable list
    ``[None, None]``; an immutable tuple avoids the shared-mutable-default
    pitfall without changing behavior.
    """
    row = row.lower()
    if (plate < 1 or plate > 4
            or row not in ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']
            or column < 1 or column > 12):
        print('Error: spreadsheet cell: %s%s: bad well values: plate: %d row: %s col: %d'
              % (element_coordinates[0], element_coordinates[1], plate, row, column),
              file=sys.stderr)
        sys.exit(-1)
    irow = ord(row) - ord('a')
    icol = column - 1
    if across_row_first:
        well_index = irow * 12 + icol + 1
    else:
        well_index = icol * 8 + irow + 1
    return well_index + (plate - 1) * 96
function
python
99,818
/// <summary>
/// Loads the entry's author, contributor, category and link collections from
/// the supplied Atom XML source.
/// </summary>
/// <param name="source">Navigator positioned on the entry element.</param>
/// <param name="manager">Namespace manager resolving the "atom" prefix.</param>
/// <returns><c>true</c> when at least one child element was loaded.</returns>
private bool LoadCollections(XPathNavigator source, XmlNamespaceManager manager)
{
    bool wasLoaded = false;
    Guard.ArgumentNotNull(source, "source");
    Guard.ArgumentNotNull(manager, "manager");

    // Select each collection's elements up front.
    XPathNodeIterator authorIterator = source.Select("atom:author", manager);
    XPathNodeIterator contributorIterator = source.Select("atom:contributor", manager);
    XPathNodeIterator categoryIterator = source.Select("atom:category", manager);
    XPathNodeIterator linkIterator = source.Select("atom:link", manager);

    // Each block below follows the same pattern: construct the entity, let it
    // load itself from the current node, and keep it only on success.
    if (authorIterator != null && authorIterator.Count > 0)
    {
        while (authorIterator.MoveNext())
        {
            AtomPersonConstruct author = new AtomPersonConstruct();
            if (author.Load(authorIterator.Current))
            {
                this.Authors.Add(author);
                wasLoaded = true;
            }
        }
    }
    if (categoryIterator != null && categoryIterator.Count > 0)
    {
        while (categoryIterator.MoveNext())
        {
            AtomCategory category = new AtomCategory();
            if (category.Load(categoryIterator.Current))
            {
                this.Categories.Add(category);
                wasLoaded = true;
            }
        }
    }
    if (contributorIterator != null && contributorIterator.Count > 0)
    {
        while (contributorIterator.MoveNext())
        {
            AtomPersonConstruct contributor = new AtomPersonConstruct();
            if (contributor.Load(contributorIterator.Current))
            {
                this.Contributors.Add(contributor);
                wasLoaded = true;
            }
        }
    }
    if (linkIterator != null && linkIterator.Count > 0)
    {
        while (linkIterator.MoveNext())
        {
            AtomLink link = new AtomLink();
            if (link.Load(linkIterator.Current))
            {
                this.Links.Add(link);
                wasLoaded = true;
            }
        }
    }
    return wasLoaded;
}
function
c#
99,819
def createCrossholeData(sensors):
    """Build a crosshole traveltime scheme from sensors in two boreholes.

    The first half of ``sensors`` acts as shots and the second half as
    receivers; every shot/receiver pair becomes one ray. Requires an even
    sensor count (reported through ``pg.error`` otherwise).
    """
    from itertools import product

    if len(sensors) % 2 > 0:
        pg.error("createCrossholeData is only defined for an equal number of"
                 " sensors in two boreholes.")

    half = len(sensors) // 2
    shot_ids = np.arange(half)
    # Cartesian product: every first-borehole sensor paired with every
    # second-borehole sensor.
    rays = np.array(list(product(shot_ids, shot_ids + half)))

    scheme = pg.DataContainer()
    for pos in sensors:
        scheme.createSensor(pos)

    scheme.resize(len(rays))
    scheme["s"] = rays[:, 0]
    scheme["g"] = rays[:, 1]
    scheme["valid"] = np.ones(len(rays))
    scheme.registerSensorIndex("s")
    scheme.registerSensorIndex("g")
    return scheme
function
python
99,820
// Determines whether a valid, unexpired session exists in the store and
// invokes callback(true, user, session) on success, callback(false) otherwise.
function isLoggedIn(callback) {
  store.get("user", function(err, user) {
    store.get("session", function(err, session) {
      // No stored user or session record -> not logged in.
      if (user === undefined || session === undefined) {
        return callback(false);
      }
      // A session missing either timestamp is treated as invalid.
      if (!session.inactive || !session.expires) {
        return callback(false);
      }
      // Reject sessions past their inactivity or absolute expiry time.
      const now = new Date();
      if (now >= session.inactive || now >= session.expires) {
        return callback(false);
      }
      return callback(true, user, session);
    });
  });
}
function
javascript
99,821
/**
 * Computes the SHA-1 signature over the dex file's contents, excluding the
 * header region up to and including the signature field itself.
 *
 * @param dex the dex buffer to hash
 * @return the 20-byte SHA-1 digest
 * @throws IOException declared for interface compatibility
 */
public byte[] computeSignature(DexBuffer dex) throws IOException {
    final MessageDigest digest;
    try {
        digest = MessageDigest.getInstance("SHA-1");
    } catch (NoSuchAlgorithmException e) {
        // Every JVM is required to provide SHA-1; absence is a platform bug.
        throw new AssertionError();
    }
    // Hash everything after the signature field so the signature does not
    // cover itself.
    final int offset = SIGNATURE_OFFSET + SIGNATURE_SIZE;
    final byte[] bytes = dex.getBytes();
    digest.update(bytes, offset, bytes.length - offset);
    return digest.digest();
}
function
java
99,822
def softmaxFunction(self, vector):
    """Return the softmax of a 1-D vector as a numpy array.

    FIX: the original exponentiated raw values with ``math.exp``, which
    overflows for moderately large inputs (e.g. 1000). Subtracting the
    maximum before exponentiating is the standard numerically stable form
    and leaves the result mathematically unchanged. The element-wise loops
    are also replaced with vectorized numpy operations.
    """
    # Shift by the max so the largest exponent is exp(0) == 1 (no overflow).
    shifted = np.asarray(vector, dtype=float) - np.max(vector)
    exps = np.exp(shifted)
    return exps / exps.sum()
function
python
99,823
// Wraps window.matchMedia with a managed listener: invokes `callback` (bound
// to `context`) with the match state whenever the query's match changes, and
// re-registers cleanly when the query string is replaced.
class MediaQuery{
  // options: {query, full} -- `full` suppresses auto-wrapping in parentheses.
  constructor(options,callback,context=this){
    let {query,full=false} = options;
    this._matches = false;
    this.full = full;
    this._mq = null;            // current MediaQueryList, set by the query setter
    this._callback = callback;
    this._context = context;
    this._bound = this._onMatch.bind(this);  // stable handler for add/removeListener
    this.query=query;           // triggers the setter below
  }
  // Internal listener: records the match state and forwards it to the callback.
  _onMatch(mq){
    this._matches = mq.matches;
    return this._callback.call(this._context,mq.matches)
  }
  // Whether the current query matched at the last notification.
  get matches(){
    return this._matches;
  }
  get query(){
    return this._query;
  }
  // Replacing the query detaches the old MediaQueryList, wraps the new query
  // in parentheses when needed, evaluates it immediately, and re-attaches.
  set query(val){
    this._query = val;
    this.constructor.remove(this._mq,this._bound);
    let query = this.query;
    if (!query) {
      return;
    }
    if (!this.full && query[0] !== '(') {
      query = '(' + query + ')';
    }
    this._mq = window.matchMedia(query);
    this._onMatch(this._mq);    // fire once with the initial state
    this.constructor.add(this._mq,this._bound);
  }
  // Attach `bound` as a listener on `mq`, if any.
  static add(mq,bound){
    if (mq) {
      mq.addListener(bound);
    }
  }
  // Detach `bound` from `mq`, if any.
  static remove(mq,bound) {
    if (mq) {
      mq.removeListener(bound);
    }
    mq = null;
  }
}
class
javascript
99,824
/**
 * A mouse listener that (un)maximises a component when it is double-clicked,
 * optionally asking the user for confirmation first (controlled by the view
 * options).
 */
public class ComponentMaximiserMouseListener extends MouseAdapter {

    private static final Logger LOGGER = Logger.getLogger(ComponentMaximiserMouseListener.class);

    private static final String DOUBLE_CLICK_WARN_MESSAGE = Constant.messages.getString("tab.doubleClick.warning");

    /**
     * The view options to check (and update) if it's required a confirmation from the user to maximise the component. Never
     * {@code null}.
     *
     * @see #confirmMaximisation()
     */
    private final OptionsParamView viewOptions;

    /**
     * The delegate used to (un)maximise the component, might be {@code null}.
     *
     * @see #triggerMaximisation(Component)
     */
    private ComponentMaximiser componentMaximiser;

    /**
     * Constructs a {@code ComponentMaximiserMouseListener} with the given view options.
     * <p>
     * The view options to check (and update) if it's required a confirmation from the user to maximise the component.
     *
     * @param viewOptions the view options
     * @throws IllegalArgumentException if the parameter {@code viewOptions} is {@code null}.
     * @see #setComponentMaximiser(ComponentMaximiser)
     */
    public ComponentMaximiserMouseListener(OptionsParamView viewOptions) {
        this(viewOptions, null);
    }

    /**
     * Constructs a {@code ComponentMaximiserMouseListener} with the given view options and given component maximiser.
     * <p>
     * The view options to check (and update) if it's required a confirmation from the user to maximise the component.
     *
     * @param viewOptions the view options
     * @param componentMaximiser the object responsible for maximising the component, might be {@code null}.
     * @throws IllegalArgumentException if the parameter {@code viewOptions} is {@code null}.
     */
    public ComponentMaximiserMouseListener(OptionsParamView viewOptions, ComponentMaximiser componentMaximiser) {
        if (viewOptions == null) {
            throw new IllegalArgumentException("Parameter viewOptions must not be null.");
        }
        this.viewOptions = viewOptions;
        setComponentMaximiser(componentMaximiser);
    }

    /**
     * Sets the {@code ComponentMaximiser} that will be used to maximise components. Might be {@code null}, in which case there
     * will be no maximisation when a component is clicked twice.
     *
     * @param componentMaximiser the {@code ComponentMaximiser} that will be used to maximise components
     * @see #getComponentMaximiser()
     * @see #triggerMaximisation(Component)
     */
    public void setComponentMaximiser(ComponentMaximiser componentMaximiser) {
        this.componentMaximiser = componentMaximiser;
    }

    /**
     * Gets the {@code ComponentMaximiser} that's used to maximise components.
     *
     * @return the {@code ComponentMaximiser} that's used to maximise the components, might be {@code null}
     * @see #setComponentMaximiser(ComponentMaximiser)
     */
    public ComponentMaximiser getComponentMaximiser() {
        return componentMaximiser;
    }

    /**
     * Calls {@link #triggerMaximisation(Component)} when clicked twice, with the source of the event as parameter.
     */
    @Override
    public void mouseClicked(MouseEvent evt) {
        if (evt.getClickCount() == 2) {
            triggerMaximisation((Component) evt.getSource());
        }
    }

    /**
     * Convenience method that programmatically triggers the (un)maximisation logic.
     * <p>
     * If a component is already maximised it's unmaximised, otherwise it is maximised the given {@code component}. This is the
     * same logic that's executed when a component is clicked twice, being the {@code component} the source of the mouse event.
     * <p>
     * The call to this method has no effect if there's no {@code ComponentMaximiser}.
     *
     * @param component the component that will be maximised, if none is maximised already
     * @throws IllegalArgumentException if the given {@code component} is {@code null} and there's no component maximised.
     * @see #setComponentMaximiser(ComponentMaximiser)
     */
    public void triggerMaximisation(Component component) {
        if (componentMaximiser == null) {
            return;
        }

        if (componentMaximiser.isComponentMaximised()) {
            componentMaximiser.unmaximiseComponent();
        } else if (confirmMaximisation()) {
            componentMaximiser.maximiseComponent(component);
        }
    }

    /**
     * Confirms, by asking the user, if the maximisation should be done.
     * <p>
     * After positive confirmation this method returns always {@code true}.
     *
     * @return {@code true} if the maximisation should be done, {@code false} otherwise.
     * @see #triggerMaximisation(Component)
     * @see OptionsParamView#getWarnOnTabDoubleClick()
     */
    private boolean confirmMaximisation() {
        if (!viewOptions.getWarnOnTabDoubleClick()) {
            return true;
        }

        if (View.getSingleton().showConfirmDialog(DOUBLE_CLICK_WARN_MESSAGE) != JOptionPane.OK_OPTION) {
            return false;
        }

        // Positive confirmation disables the warning permanently; a failure
        // to persist the option is only logged.
        viewOptions.setWarnOnTabDoubleClick(false);
        try {
            viewOptions.getConfig().save();
        } catch (ConfigurationException e) {
            LOGGER.error(e.getMessage(), e);
        }
        return true;
    }
}
class
java
99,825
/**
 * Opens the system share sheet with a Play Store link to this app, optionally
 * prefixed with a custom message and subject. Failures are logged and
 * swallowed (best-effort sharing).
 *
 * @param activity the activity used to resolve the package name and launch the intent
 * @param app_share_message optional text placed before the store URL
 * @param app_share_subject optional share subject line
 */
public void shareApp(Activity activity, String app_share_message, String app_share_subject) {
    try {
        Intent sendIntent = new Intent(Intent.ACTION_SEND);
        sendIntent.setType("text/plain");
        if (!TextUtils.isEmpty(app_share_subject)) {
            sendIntent.putExtra(Intent.EXTRA_SUBJECT, app_share_subject);
        }
        final String storeUrl =
                "http://play.google.com/store/apps/details?id=" + activity.getPackageName();
        final String message =
                TextUtils.isEmpty(app_share_message) ? storeUrl : app_share_message + storeUrl;
        sendIntent.putExtra(Intent.EXTRA_TEXT, message);
        activity.startActivity(sendIntent);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
function
java
99,826
/**
 * Parses the free-text {@code lastUpdate} field (format
 * {@code "dd.MM.yyyy, HH:mm Uhr"}) into a {@link LocalDate}.
 *
 * @return the parsed date, or {@code null} when the field is null or blank
 */
public LocalDate parseLastUpdateAsDate() {
    if (lastUpdate == null || lastUpdate.isBlank()) {
        return null;
    }
    // Strip the trailing " Uhr" suffix and the comma between date and time.
    final String normalized = lastUpdate.replace(" Uhr", "").replace(",", "");
    return LocalDate.parse(normalized, DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm"));
}
function
java
99,827
/// Uploads `geometry` to the GPU and wraps the resulting buffers in an
/// `InstancedGeometry` handle.
pub fn upload_geometry(&mut self, geometry: Geometry) -> InstancedGeometry {
    InstancedGeometry {
        gpu_data: self.create_gpu_data(geometry),
    }
}
function
rust
99,828
/**
 * Strips the package qualifier from a fully qualified class name.
 *
 * @param longClassName the (possibly qualified) class name
 * @return the text after the final {@code '.'}, or the input unchanged when
 *         it contains no dot
 */
public static String toShortName(String longClassName) {
    final int lastDot = longClassName.lastIndexOf('.');
    return lastDot < 0 ? longClassName : longClassName.substring(lastDot + 1);
}
function
java
99,829
/**
 * An {@link Action} that bundles several actions for submission as a single
 * batch, producing a {@link BatchResult}.
 */
public final class BatchAction implements Action<BatchResult> {

    /**
     * The list of actions in this batch. Non-{@code final} to allow for GWT
     * serialization, but never altered in practice.
     */
    private ImmutableList<Action<?>> actions;

    // serialization support
    @SuppressWarnings("unused")
    private BatchAction() {
    }

    /**
     * Constructs a new {@code BatchAction} from the specified list of actions.
     * A copy of the provided list is created and stored internally.
     *
     * @param actions
     *            the actions to submit as a batch
     */
    public BatchAction(List<? extends Action<?>> actions) {
        this.actions = ImmutableList.copyOf(actions);
    }

    /**
     * @return the actions in this batch (immutable)
     */
    public List<Action<?>> getActions() {
        return actions;
    }
}
class
java
99,830
/**
 * Extracts a {@link License} (with its provider profile) from a scraped
 * license-details HTML document.
 * <p>
 * Field values are read from the page's {@code span#lbl*} elements. The
 * provider name is split on its last space into first/last name; a name with
 * no space is treated as last-name-only.
 *
 * @param details the parsed details page
 * @return the populated license
 * @throws ParseException if a date field cannot be parsed
 */
private License parseLicense(Document details) throws ParseException {
    // Raw text of every field of interest on the details page.
    String name = details.select("span#lblName2").text();
    String address = details.select("span#lblAddress").text();
    String licenseType = details.select("span#lblLicenseType").text();
    String licenseNumber = details.select("span#lblLicenseNumber").text();
    String licenseStatus = details.select("span#lblLicenseStatus").text();
    String originalIssueDate = details.select("span#lblOriginalIssueDate").text();
    String expirationDate = details.select("span#lblExpirationDate").text();
    String disciplinaryAction = details.select("span#lblDisciplinaryAction").text();

    License license = new License();
    license.setLicenseNumber(licenseNumber);
    ProviderProfile profile = new ProviderProfile();
    license.setProfile(profile);

    User user = new User();
    // Split "First Middle Last" at the last space; everything before it is
    // treated as the first name.
    if (name.lastIndexOf(" ") > -1) {
        String firstName = name.substring(0, name.lastIndexOf(" ")).trim();
        String lastName = name.substring(name.lastIndexOf(" "), name.length()).trim();
        user.setFirstName(firstName);
        user.setLastName(lastName);
    } else {
        user.setLastName(name);
    }

    List<Address> addresses = new ArrayList<Address>();
    addresses.add(parseAddress(address.trim()));
    profile.setAddresses(addresses);
    profile.setUser(user);

    LicenseType licType = new LicenseType();
    licType.setName(licenseType);
    license.setType(licType);

    // Dates are optional on the page; only set when parseable.
    Date issueDate = parseDate(originalIssueDate, DATE_FORMAT);
    if (issueDate != null) {
        license.setOriginalIssueDate(issueDate);
    }
    Date expireDate = parseDate(expirationDate, DATE_FORMAT);
    if (expireDate != null) {
        license.setExpireDate(expireDate);
    }

    LicenseStatus status = new LicenseStatus();
    status.setName(licenseStatus);
    license.setStatus(status);

    // Any value other than exactly "No" counts as having discipline history.
    license.setDiscipline(!"No".equals(disciplinaryAction.trim()));
    return license;
}
function
java
99,831
/// <summary>
/// Builds a <see cref="UnicodeSymbol"/> from a tokenized string of decimal
/// code points, converting each code point to its UTF-16 representation.
/// Returns <c>Empty</c> for an empty input.
/// </summary>
/// <param name="tokenizedCodePoints">The tokenized code-point string.</param>
/// <returns>The corresponding symbol, or <c>Empty</c>.</returns>
public static UnicodeSymbol FromTokenizedCodePoints(string tokenizedCodePoints)
{
    Guard.AgainstNullArgument(nameof(tokenizedCodePoints), tokenizedCodePoints);

    if (tokenizedCodePoints.IsEmpty())
    {
        return Empty;
    }

    int[] codePoints = split(tokenizedCodePoints)
        .Select(cp => Convert.ToInt32(cp.Trim(), CultureInfo.InvariantCulture))
        .ToArray();

    // Concatenate the UTF-16 form of every code point into the symbol text.
    string symbol = string.Concat(codePoints.Select(char.ConvertFromUtf32));

    return new UnicodeSymbol(codePoints, symbol);
}
function
c#
99,832
/// <summary>
/// Writes all sixteen components of <paramref name="value"/> to the buffer in
/// row-major order (M11..M14, M21..M24, M31..M34, M41..M44).
/// </summary>
/// <param name="message">The buffer to write into.</param>
/// <param name="value">The matrix to serialize.</param>
public static void Write(this NetBuffer message, Matrix value)
{
    message.Write(value.M11);
    message.Write(value.M12);
    message.Write(value.M13);
    message.Write(value.M14);
    message.Write(value.M21);
    message.Write(value.M22);
    message.Write(value.M23);
    message.Write(value.M24);
    message.Write(value.M31);
    message.Write(value.M32);
    message.Write(value.M33);
    message.Write(value.M34);
    message.Write(value.M41);
    message.Write(value.M42);
    message.Write(value.M43);
    message.Write(value.M44);
}
function
c#
99,833
/// <summary>
/// Index wrapping and flattening helpers for 2D/3D grids.
/// FIX: the original source had its XML doc comment markers stripped, leaving
/// orphan tags and bare prose between members (which does not compile); this
/// restores well-formed documentation and removes an already-commented-out
/// duplicate of <c>Wrap</c>.
/// </summary>
public static class GridUtil
{
    /// <summary>Returns the wrap function corresponding to the given mode.</summary>
    /// <param name="mode">The wrap mode to select for.</param>
    /// <returns>A function mapping (index, range) to a wrapped index.</returns>
    public static Func<int, int, int> SelectWrapFunction(WrapMode mode)
    {
        switch (mode)
        {
            case WrapMode.Clamp:
                return Clamp;
            case WrapMode.Repeat:
                return Repeat;
            case WrapMode.Mirror:
                return Mirror;
        }

        throw new NotSupportedException();
    }

    /// <summary>Wraps an index into [0, range) according to the given mode.</summary>
    /// <param name="index">The index to wrap.</param>
    /// <param name="range">The exclusive upper bound.</param>
    /// <param name="mode">The wrap mode.</param>
    /// <returns>The wrapped index.</returns>
    public static int Wrap(int index, int range, WrapMode mode)
    {
        return mode == WrapMode.Repeat ? Repeat(index, range)
             : mode == WrapMode.Mirror ? Mirror(index, range)
             : Clamp(index, range);
    }

    /// <summary>Clamps i into [0, n - 1].</summary>
    private static int Clamp(int i, int n)
    {
        return (i < 0) ? 0 : (i < n) ? i : n - 1;
    }

    /// <summary>Wraps i periodically into [0, n).</summary>
    private static int Repeat(int i, int n)
    {
        i %= n;
        return (i < 0) ? i + n : i;
    }

    /// <summary>Reflects i into [0, n) with period 2n (ping-pong).</summary>
    private static int Mirror(int i, int n)
    {
        var n2 = n + n;
        i %= n2;
        if (i < 0) i += n2;
        return (i < n) ? i : n2 - i - 1;
    }

    /// <summary>Flattens 2D indices into a linear index (x varies fastest).</summary>
    public static int FlattenIndices(int x, int y, int nx)
    {
        return x + y * nx;
    }

    /// <summary>Flattens 3D indices into a linear index; nxy = nx * ny.</summary>
    public static int FlattenIndices(int x, int y, int z, int nx, int nxy)
    {
        return x + y * nx + z * nxy;
    }

    /// <summary>Expands a linear index into (x, y).</summary>
    public static (int, int) ExpandIndex(int index, int nx)
    {
        int y = index / nx;
        return (index - y * nx, y);
    }

    /// <summary>Expands a linear index into (x, y, z); nxy = nx * ny.</summary>
    public static (int, int, int) ExpandIndex(int index, int nx, int nxy)
    {
        int z = index / nxy;
        index -= z * nxy;
        int y = index / nx;
        return (index - y * nx, y, z);
    }
}
class
c#
99,834
/*
** Resize a prior memsys5 allocation.
**
** pPrior must be non-NULL and nBytes must be zero or a power of two (the
** asserts below enforce this; callers round sizes up first). nBytes==0
** returns NULL. A request that still fits the existing power-of-two bucket
** returns pPrior unchanged; only growth allocates a new block, copies the
** old contents, and frees the old block under the allocator mutex.
*/
static void *memsys5Realloc(void *pPrior, int nBytes){
  int nOld;
  void *p;
  assert( pPrior!=0 );
  assert( (nBytes&(nBytes-1))==0 );  /* power of two (or zero) only */
  assert( nBytes>=0 );
  if( nBytes==0 ){
    return 0;
  }
  nOld = memsys5Size(pPrior);
  /* Allocations come in power-of-two buckets, so any request no larger than
  ** the current bucket can reuse the block as-is. */
  if( nBytes<=nOld ){
    return pPrior;
  }
  /* Grow: allocate, copy old contents, free old block -- all while holding
  ** the allocator mutex. On allocation failure pPrior is left intact. */
  memsys5Enter();
  p = memsys5MallocUnsafe(nBytes);
  if( p ){
    memcpy(p, pPrior, nOld);
    memsys5FreeUnsafe(pPrior);
  }
  memsys5Leave();
  return p;
}
function
c
99,835
async def _autoclose_bids():
    """Close every daily bid currently in the NOTIFIED state."""
    async with Engine() as engine:
        async with engine.acquire() as conn:
            notified = await get_daily_bids(conn, statuses=[BidStatus.NOTIFIED])
            ids_to_close = [bid.id for bid in notified]
            # Nothing to do when no bids are awaiting closure.
            if not ids_to_close:
                return
            logger.debug('Closing bids %s', ids_to_close)
            await mark_bids_as(conn, ids_to_close, BidStatus.CLOSED)
function
python
99,836
def load_combined_df(
    experiment_dirs: List[Path], experiment_labels: List[str], init_data_filename: str
) -> pd.DataFrame:
    """Load each experiment's dataframe, tag it with its run label, and
    concatenate them all into a single dataframe.

    :param experiment_dirs: directories, one per experiment run
    :param experiment_labels: labels paired positionally with the directories
    :param init_data_filename: data filename passed through to the loader
    :return: concatenation of the per-run frames, each carrying its label in
        the RUN_NAME_COLUMN column
    """
    labelled_frames = []
    for label, directory in zip(experiment_labels, experiment_dirs):
        frame = load_experiment_df(directory, init_data_filename)
        frame[RUN_NAME_COLUMN] = label
        labelled_frames.append(frame)
    return pd.concat(labelled_frames)
function
python
99,837
/**
 * Cleans up swipe state when a child view is detached (e.g. recycled).
 * If the detached holder is the actively-swiped one, the selection is
 * cleared; otherwise its view state is reset and any in-flight recover
 * animation is cancelled.
 */
@Override
public void onChildViewDetachedFromWindow(@NonNull View view) {
    final RecyclerView.ViewHolder holder = recyclerView.getChildViewHolder(view);
    if (!(holder instanceof SwipeOpenViewHolder)) {
        return;
    }
    final SwipeOpenViewHolder swipeHolder = (SwipeOpenViewHolder) holder;
    // forget the previously-selected holder if it is the one going away
    if (prevSelected == swipeHolder) {
        prevSelected = null;
    }
    if (selected != null && swipeHolder == selected) {
        // detaching the active holder: clear the selection entirely
        select(null, ACTION_STATE_IDLE);
    } else {
        callback.clearView(recyclerView, swipeHolder);
        endRecoverAnimation(swipeHolder);
    }
}
function
java
99,838
def qac_stats(image, test = None, eps=None, box=None, region=None, pb=None, pbcut=0.8, edge=False, sratio=True):
    """Print image (or MS) statistics and optionally regression-check them.

    Reports mean, rms, min, max and flux for a CASA image (via imstat) or
    the DATA column of a measurement set (via the tb tool).  When `test` is
    given, the freshly computed statistics string is compared against it —
    exactly when eps is None, else element-wise within relative error eps —
    and a FAILED line plus the expected values are printed on mismatch.

    NOTE(review): the locals `min` and `max` shadow Python builtins, and the
    `sratio` parameter is reassigned to the computed signal ratio below;
    both are kept as-is to avoid behavior changes.
    """
    def text2array(text):
        # parse a whitespace-separated string of numbers into an ndarray
        a = text.split()
        b = np.zeros(len(a))
        for i,ai in zip(range(len(a)),a):
            b[i] = float(ai)
        return b
    def arraydiff(a1,a2):
        # relative differences, only where the values actually differ
        delta = abs(a1-a2)
        idx = np.where(delta>0)
        return delta[idx]/a1[idx]
    def lel(name):
        # wrap a name in single quotes for a CASA LEL mask expression
        return '\'' + name + '\''
    qac_tag("plot")
    if not QAC.exists(image):
        print("QAC_STATS: missing %s " % image)
        return
    if QAC.iscasa(image + '/ANTENNA'):
        # measurement set: take statistics of the first DATA row amplitudes
        Qms = True
        tb.open(image)
        data = np.abs(tb.getcol('DATA')[0,:,:])
        mean = data.mean()
        rms = data.std()
        min = data.min()
        max = data.max()
        flux = 0.0
        tb.close()
        del data
    else:
        # regular image: use imstat, optionally masked by the primary beam
        Qms = False
        maskarea = None
        if pbcut != None:
            if pb == None:
                # derive the .pb image name from the input image name
                pb = image[:image.rindex('.')] + '.pb'
                if QAC.iscasa(pb):
                    maskarea = lel(pb) + '>' + str(pbcut)
            else:
                maskarea = lel(pb) + '>' + str(pbcut)
        if edge:
            # skip the first and last channel
            nchan = imhead(image)['shape'][3]
            s0 = imstat(image,mask=maskarea,chans='1~%d' % (nchan-2),box=box,region=region)
        else:
            s0 = imstat(image,box=box,region=region,mask=maskarea)
        mean = s0['mean'][0]
        rms = s0['sigma'][0]
        min = s0['min'][0]
        max = s0['max'][0]
        if 'flux' in s0:
            flux = s0['flux'][0]
        else:
            flux = s0['sum'][0]
    test_new = "%s %s %s %s %s" % (repr(mean),repr(rms),repr(min),repr(max),repr(flux))
    if test == None:
        test_out = ""
        report = False
    else:
        if eps == None:
            # exact string comparison
            if test_new == test:
                test_out = "OK"
                report = False
            else:
                test_out = "FAILED regression"
                report = True
        else:
            # numeric comparison within relative tolerance eps
            v1 = text2array(test_new)
            v2 = text2array(test)
            delta = arraydiff(v1,v2)
            print(delta)
            if delta.max() < eps:
                test_out = "OK"
                report = False
            else:
                test_out = "FAILED regression delta=%g > %g" % (delta.max(),eps)
                report = True
    if sratio and not Qms:
        # signal ratio (sum_pos + sum_neg) / (sum_pos - sum_neg) of the pixels
        if QAC.iscasa(image,'Image'):
            data = QAC.casa2np(image)
        else:
            data = QAC.fits2np(image)
        sump = data[data > 0.0].sum()
        sumn = data[data < 0.0].sum()
        sratio = (sump + sumn) / (sump - sumn)
        srat = str(sratio)
    else:
        srat = ""
    msg1 = "QAC_STATS: %s" % (image)
    print("%s %s %s %s" % (msg1,test_new,srat,test_out))
    if report:
        # align the EXPECTED line under the computed values
        fmt1 = '%%-%ds' % (len(msg1))
        msg2 = fmt1 % ' '
        print("%s %s EXPECTED" % (msg2,test))
function
python
99,839
def initialize_symmertic_ratchets(
    self,
    shared_key: bytes,
    /,
    opt_public_key: Optional[X25519PublicKey] = None,
    opt_private_key: Optional[X25519PrivateKey] = None,
) -> None:
    """Initialize the symmetric send/recv ratchets from the shared secret.

    Which chain (send vs recv) is derived first depends on ``self.my_turn``,
    so the two parties end up with mirrored ratchets.  The initiator may
    pass the peer's public key to immediately rotate the DH ratchet; the
    responder may pass (or else generates) its own DH private key.

    NOTE(review): the "symmertic" spelling in the name is kept as-is —
    renaming would break existing callers.
    """
    r_set = RatchetSet()
    r_set.root_ratchet = InnerRatchet(shared_key)
    assert (
        self.my_turn is not None
    ), "The order of intitialization must be resolved"
    if self.my_turn:
        # Our turn first: derive the send chain before the recv chain.
        r_set.send_ratchet = InnerRatchet(r_set.root_ratchet.turn()[0])
        r_set.recv_ratchet = InnerRatchet(r_set.root_ratchet.turn()[0])
        if opt_public_key is not None:
            self.ratchet_set = r_set
            self.rotate_dh_ratchet(opt_public_key)
        assert opt_private_key is None, "Bad initialization!!"
    else:
        # Peer's turn first: mirrored derivation order, and we own the DH key.
        r_set.recv_ratchet = InnerRatchet(r_set.root_ratchet.turn()[0])
        r_set.send_ratchet = InnerRatchet(r_set.root_ratchet.turn()[0])
        self.my_turn = True
        if opt_private_key is not None:
            r_set.dh_ratchet = opt_private_key
        else:
            r_set.dh_ratchet = generate_DH()
        assert opt_public_key is None, "Bad initialization!!"
    self.ratchet_set = r_set
function
python
99,840
/**
 * Post subclass that renders a regular comment: author header, formatted
 * body (or a live preview while the post is being edited), and controls for
 * revealing the content of hidden posts.
 */
class CommentPost extends Post {
  init() {
    super.init();

    /**
     * If the post has been hidden, then this flag determines whether or not its
     * content has been expanded.
     *
     * @type {Boolean}
     */
    this.revealContent = false;

    // Create an instance of the component that displays the post's author so
    // that we can force the post to rerender when the user card is shown.
    this.postUser = new PostUser({post: this.props.post});
    this.subtree.check(
      () => this.postUser.cardVisible,
      () => this.isEditing()
    );
  }

  content() {
    // Note: we avoid using JSX for the <ul> below because it results in some
    // weirdness in Mithril.js 0.1.x (see lb308/core#975). This workaround can
    // be reverted when we upgrade to Mithril 1.0.
    return super.content().concat([
      <header className="Post-header">{m('ul', listItems(this.headerItems().toArray()))}</header>,
      <div className="Post-body">
        {this.isEditing()
          ? <div className="Post-preview" config={this.configPreview.bind(this)}/>
          : m.trust(this.props.post.contentHtml())}
      </div>
    ]);
  }

  config(isInitialized, context) {
    super.config(...arguments);

    const contentHtml = this.isEditing() ? '' : this.props.post.contentHtml();

    // If the post content has changed since the last render, we'll run through
    // all of the <script> tags in the content and evaluate them. This is
    // necessary because TextFormatter outputs them for e.g. syntax highlighting.
    if (context.contentHtml !== contentHtml) {
      this.$('.Post-body script').each(function() {
        eval.call(window, $(this).text());
      });
    }

    context.contentHtml = contentHtml;
  }

  /**
   * Whether this post is currently open in the edit composer.
   *
   * @return {Boolean}
   */
  isEditing() {
    return app.composer.component instanceof EditPostComposer &&
      app.composer.component.props.post === this.props.post;
  }

  /**
   * Element attributes, including state CSS classes (hidden/edited/etc).
   */
  attrs() {
    const post = this.props.post;
    const attrs = super.attrs();

    attrs.className += ' ' + classList({
      'CommentPost': true,
      'Post--hidden': post.isHidden(),
      'Post--edited': post.isEdited(),
      'revealContent': this.revealContent,
      'editing': this.isEditing()
    });

    return attrs;
  }

  configPreview(element, isInitialized, context) {
    if (isInitialized) return;

    // Every 50ms, if the composer content has changed, then update the post's
    // body with a preview.
    let preview;
    const updatePreview = () => {
      const content = app.composer.component.content();

      if (preview === content) return;

      preview = content;

      s9e.TextFormatter.preview(preview || '', element);
    };
    updatePreview();

    const updateInterval = setInterval(updatePreview, 50);
    context.onunload = () => clearInterval(updateInterval);
  }

  /**
   * Toggle the visibility of a hidden post's content.
   */
  toggleContent() {
    this.revealContent = !this.revealContent;
  }

  /**
   * Build an item list for the post's header.
   *
   * @return {ItemList}
   */
  headerItems() {
    const items = new ItemList();
    const post = this.props.post;
    const props = {post};

    items.add('user', this.postUser.render(), 100);
    items.add('meta', PostMeta.component(props));

    if (post.isEdited() && !post.isHidden()) {
      items.add('edited', PostEdited.component(props));
    }

    // If the post is hidden, add a button that allows toggling the visibility
    // of the post's content.
    if (post.isHidden()) {
      items.add('toggle', (
        Button.component({
          className: 'Button Button--default Button--more',
          icon: 'ellipsis-h',
          onclick: this.toggleContent.bind(this)
        })
      ));
    }

    return items;
  }
}
class
javascript
99,841
def sort_and_merge_gzipped_csv_files(
    input_filenames,
    output_filename,
    sort_columns,
):
    """Merge several gzipped CSV files sharing one header into a single
    gzipped CSV, sorted by the named columns via the external `sort` tool.

    NOTE(review): the pipeline runs with shell=True; the header and output
    path are quoted here, and read_files / sort_by_columns are assumed to
    quote their own arguments — TODO confirm.
    """
    header_line = get_header_line(input_filenames)
    sort_column_indices = get_column_indices(header_line, sort_columns)
    # Strip each file's header (skip_lines=1), re-emit one header, then sort.
    pipeline = "( {read_files} ) | ( echo {header_line}; {sort_by_columns} )".format(
        read_files=read_files(input_filenames, skip_lines=1),
        header_line=quote(header_line),
        sort_by_columns=sort_by_columns(sort_column_indices),
    )
    pipeline += " | gzip"
    pipeline += " > {}".format(quote(output_filename))
    env = os.environ.copy()
    # LANG=C forces byte-wise collation, keeping `sort` fast and deterministic.
    env["LANG"] = "C"
    subprocess.check_call(pipeline, shell=True, env=env)
function
python
99,842
func (reader PluginConfigReaderWithError) GetStreamArray(key string, defaultValue []MessageStreamID) ([]MessageStreamID, error) { key = reader.config.registerKey(key) if reader.HasValue(key) { values, err := reader.GetStringArray(key, []string{}) if err != nil { return nil, err } streamArray := []MessageStreamID{} for _, streamName := range values { streamArray = append(streamArray, GetStreamID(streamName)) } return streamArray, nil } return defaultValue, nil }
function
go
99,843
/*
** Read 'num' bits from the deflate bit stream, least-significant bit
** first, and return them packed into an unsigned value.
*/
static unsigned PNG_zGetBits(PNG_decode *d, unsigned num)
{
    unsigned value = 0;
    unsigned bit;

    for (bit = 0; bit < num; ++bit) {
        /* each stream bit lands at position 'bit' of the result */
        if (PNG_zGetBit(d))
            value |= 1u << bit;
    }
    return value;
}
function
c
99,844
def update_emissions(component, emissions):
    """Apply an emissions settings dict to a component's cost model.

    Mutates ``emissions`` in place: a scalar 'key' (with its matching
    'fitting_value' / 'dependant_value') is normalized to one-element
    lists, then each entry is forwarded to update_cost().

    No-op when ``emissions`` is empty or falsy.
    """
    if not emissions:
        return
    if emissions['key'] == 'variable':
        # presumably selects the dict variant valid for this component —
        # TODO confirm against choose_valid_dict
        emissions = choose_valid_dict(component, emissions)
    if type(emissions['key']) is not list:
        # normalize the scalar form into parallel lists
        emissions['key'] = [emissions['key']]
        emissions['fitting_value'] = [emissions['fitting_value']]
        emissions['dependant_value'] = [emissions['dependant_value']]
    for this_index in range(len(emissions['key'])):
        dependant_value = get_dependant_value(component, emissions, this_index, 'fix_emissions')
        update_cost(component, emissions, this_index, dependant_value, 'Emissions')
function
python
99,845
/// <summary>
/// Walks the structure one key element at a time and returns the first
/// value encountered along the path.
/// </summary>
/// <param name="keyElementEnumerator">Enumerator over the key's elements;
/// consumed only as far as the walk proceeds.</param>
/// <param name="value">The first value found, or default when none.</param>
/// <returns>true when a node holding a value was reached.</returns>
/// <exception cref="ArgumentNullException">keyElementEnumerator is null.</exception>
internal bool TryStepToValue(IEnumerator<TKeyElement> keyElementEnumerator, out TValue value)
{
    if (keyElementEnumerator == null)
        throw new ArgumentNullException(nameof(keyElementEnumerator));

    // Stop when the key runs out, or at the first element with no matching
    // step (StepNext returning false).
    while (keyElementEnumerator.MoveNext() && this.StepNext(keyElementEnumerator.Current))
    {
        if (this.CurrentNode.HasValue)
        {
            value = this.CurrentNode.Value;
            return true;
        }
    }

    value = default(TValue);
    return false;
}
function
c#
99,846
/**
 * Verifies that HadoopPathBasedBulkFormatBuilder works when loaded through a
 * child-first user classloader (simulating user-code jars): the builder is
 * instantiated and createBuckets invoked reflectively via that loader, and
 * the resulting Buckets instance must be non-null.
 */
@Test
@SuppressWarnings({"unchecked", "rawtypes"})
public void testCreatingBuildWithinUserClassLoader() throws Exception {
    ClassLoader appClassLoader = getClass().getClassLoader();
    // The test clones the classpath from a URLClassLoader; skip when the
    // application loader is not one.
    Assume.assumeTrue(appClassLoader instanceof URLClassLoader);
    ClassLoader userClassLoader =
            new SpecifiedChildFirstUserClassLoader(
                    HadoopPathBasedBulkFormatBuilder.class.getName(),
                    appClassLoader,
                    ((URLClassLoader) appClassLoader).getURLs());
    Class<HadoopPathBasedBulkFormatBuilder> userHadoopFormatBuildClass =
            (Class<HadoopPathBasedBulkFormatBuilder>)
                    userClassLoader.loadClass(
                            HadoopPathBasedBulkFormatBuilder.class.getName());
    Constructor<?> constructor =
            userHadoopFormatBuildClass.getConstructor(
                    Path.class,
                    HadoopPathBasedBulkWriter.Factory.class,
                    Configuration.class,
                    BucketAssigner.class);
    Object hadoopFormatBuilder =
            constructor.newInstance(
                    new Path("/tmp"),
                    new TestHadoopPathBasedBulkWriterFactory(),
                    new Configuration(),
                    new DateTimeBucketAssigner<>());
    // Invoke createBuckets reflectively since the class came from the
    // user classloader, not the app classloader.
    Buckets<String, String> buckets =
            (Buckets<String, String>)
                    userHadoopFormatBuildClass
                            .getMethod("createBuckets", int.class)
                            .invoke(hadoopFormatBuilder, 0);
    assertNotNull(buckets);
}
function
java
99,847
def multivariate_spike_distance(spike_trains, ti, te, N):
    """Average bivariate SPIKE-distance profile over all train pairs.

    Parameters
    ----------
    spike_trains : sequence of spike-time arrays (at least two)
    ti, te : start / end of the observation interval
    N : number of samples in the distance profile

    Returns
    -------
    t : time axis of the profile (shared by all pairs)
    d : (N,) ndarray, mean pairwise distance profile

    Raises
    ------
    ValueError
        If fewer than two spike trains are given — the pairwise mean is
        undefined (the original code raised ZeroDivisionError here).
    """
    n_trains = len(spike_trains)
    if n_trains < 2:
        raise ValueError("multivariate_spike_distance needs at least two spike trains")

    d = np.zeros((N,))
    t = None
    for i, t1 in enumerate(spike_trains[:-1]):
        for t2 in spike_trains[i + 1:]:
            tij, dij = bivariate_spike_distance(t1, t2, ti, te, N)
            if t is None:
                # every pair shares the same time axis; keep the first one
                t = tij
            d = d + dij

    # mean over the C(n_trains, 2) unordered pairs
    d = d / float(n_trains * (n_trains - 1) / 2)
    return t, d
function
python
99,848
def simulate_interdependent_effects(self, network_recovery_original):
    """Step the coupled power/water simulation through every event-table
    time stamp, updating cross-network dependencies and collecting
    resilience metrics along the way.

    Returns the populated WeightedResilienceMetric instance.
    """
    # work on a deep copy so repeated runs don't mutate the caller's object
    network_recovery = copy.deepcopy(network_recovery_original)
    resilience_metrics = resm.WeightedResilienceMetric()
    unique_time_stamps = sorted(
        list(network_recovery.event_table.time_stamp.unique())
    )
    print(unique_time_stamps)
    # deltas between consecutive time stamps drive each simulation step
    unique_time_differences = [
        x - unique_time_stamps[i - 1] for i, x in enumerate(unique_time_stamps)
    ][1:]
    print(unique_time_differences)
    for index, time_stamp in enumerate(unique_time_stamps[:-1]):
        print(f"Simulating network conditions until {time_stamp} s")
        print(
            "Simulation time: ",
            network_recovery.network.wn.options.time.duration,
            "; Hydraulic time step: ",
            network_recovery.network.wn.options.time.hydraulic_timestep,
            "; Report time step: ",
            network_recovery.network.wn.options.time.report_timestep,
        )
        # apply failures/repairs scheduled within this time window
        network_recovery.update_directly_affected_components(
            network_recovery.network.wn.options.time.duration,
            network_recovery.network.wn.options.time.duration
            + unique_time_differences[index],
        )
        power.run_power_simulation(network_recovery.network.pn)
        network_recovery.network.dependency_table.update_dependencies(
            network_recovery.network,
            network_recovery.network.wn.options.time.duration,
            network_recovery.network.wn.options.time.duration
            + unique_time_differences[index],
        )
        wn_results = water.run_water_simulation(network_recovery.network.wn)
        # FIXME(review): the next fragment is garbled in the source — it looks
        # like the tail of a print(...) reporting leak demand at "W_PMA2000";
        # the opening of the statement was lost and must be restored upstream.
        "W_PMA2000"].round(decimals=4).values, "Total leak: ", wn_results.node["leak_demand"].sum())
        resilience_metrics.calculate_node_details(network_recovery, wn_results)
        resilience_metrics.calculate_pump_flow(network_recovery, wn_results)
        resilience_metrics.calculate_power_load(network_recovery, time_stamp)
        resilience_metrics.calculate_pump_status(network_recovery, wn_results)
        # if index < len(unique_time_stamps) - 1:
        network_recovery.network.wn.options.time.duration += int(
            unique_time_differences[index]
        )
        print("******************\n")
    return resilience_metrics
python
99,849
def add_trace(self, number: int = 1, s_parameter: str = "S21"):
    """Create a new measurement trace on the instrument and show it.

    Parameters
    ----------
    number : int
        Trace number; must not collide with an existing trace.
    s_parameter : str
        S-parameter the trace measures, e.g. "S21".
    """
    _, traces, _ = self.get_existing_traces()
    if number in traces:
        print('Trace exist. Please use another trace number or remove the current one with remove_trace(number).')
    else:
        logging.debug(__name__ + f": add trace{number} with S-parameter {s_parameter}")
        # SCPI: define the measurement, then feed it to display window 1
        self.write(f"CALC1:MEAS{number}:DEF '{s_parameter}'")
        self.write(f"DISP:MEAS{number}:FEED 1")
        print('Trace is successfully created.')
function
python
99,850
// Returns true when any activated power-up of the given type is present,
// so an effect is only switched off once the last power-up of that type
// has expired.
bool IsOtherPowerUpActive( std::vector<PowerUp> &powerUps, std::string type )
{
    for ( const PowerUp &candidate : powerUps )
    {
        if ( candidate.Activated && candidate.Type == type )
            return true;
    }
    return false;
}
function
c++
99,851
/**
 * Delta of a single-barrier FX option expressed as an amount in the second
 * currency of the underlying forex pair, scaled by the (absolute)
 * currency-1 notional.
 *
 * @param barrierOption the barrier option, not null
 * @param data curve and smile data; must be compatible with both option currencies
 * @param directQuote whether the delta is with respect to the direct quote
 *        (as opposed to the reverse quote)
 * @return the delta amount in currency 2
 */
public CurrencyAmount delta(final ForexOptionSingleBarrier barrierOption, final BlackForexSmileProviderInterface data, final boolean directQuote) {
    ArgumentChecker.notNull(barrierOption, "barrierOption");
    ArgumentChecker.notNull(data, "data");
    ArgumentChecker.isTrue(data.checkCurrencies(barrierOption.getCurrency1(), barrierOption.getCurrency2()), "Option currencies not compatible with smile data");
    final double deltaRelative = deltaRelative(barrierOption, data, directQuote);
    final ForexOptionVanilla underlyingOption = barrierOption.getUnderlyingOption();
    // scale the relative delta by the absolute currency-1 payment amount
    return CurrencyAmount.of(underlyingOption.getUnderlyingForex().getCurrency2(), deltaRelative * Math.abs(underlyingOption.getUnderlyingForex().getPaymentCurrency1().getAmount()));
}
function
java
99,852
/**
 * Registers an attribute with this registry, subscribing the shared
 * qualifier-change listener at most once (re-registering the same
 * attribute does not add a duplicate listener).  Null is ignored.
 *
 * @param attribute the attribute to register; may be null
 * @deprecated marked deprecated in the original source; the intended
 *             replacement is not visible here
 */
@Deprecated
public void registerAttribute(A attribute) {
    if (null == attribute)
        return;
    // Scan the listeners already attached for the qualifier property so we
    // never subscribe ATTRIBUTE_WORKER twice (identity comparison).
    boolean listeningAlready = false;
    for (PropertyChangeListener listener : attribute.getPropertyChangeListeners(Attribute.QUALIFIER_NAME)) {
        if (ATTRIBUTE_WORKER == listener) {
            listeningAlready = true;
            break;
        }
    }
    if (!listeningAlready) {
        attribute.addPropertyChangeListener(Attribute.QUALIFIER_NAME, ATTRIBUTE_WORKER);
    }
    addAttributeByQualifier(attribute);
    addAttributeById(attribute);
}
function
java
99,853
/**
 * Maps a DSL document whose required "group_description" property is
 * missing and expects start() to throw a ValidationException carrying
 * exactly one validation model.  Falling through to the end of the method
 * (no exception thrown) fails the test.
 */
@Test
void testMissingRequiredProperty() {
    LOG.info("testMissingRequiredProperty");
    DSLMappingService m = null;
    try {
        String aadmTTL = RepositoryTestUtils.fileToString("dsl/snow/ide_snow_v3_required_property_missing.ttl");
        m = new DSLMappingService(kb, aadmTTL,"", false,"snow","DSL","snow.ttl", "");
        try {
            m.start();
        } catch (ValidationException e) {
            // expected path: exactly one validation error for the missing property
            List<ValidationModel> validationModels = e.validationModels;
            for (ValidationModel validationModel : validationModels) {
                LOG.info("validationModel" + validationModel.toJson());
            }
            assertEquals(validationModels.size(),1);
            LOG.info("Test Passed: group_description required property is missing");
            return;
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            fail("Exception was thrown in start");
        }
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
        fail("Exception was thrown");
    }
    // only reached when start() completed without the expected exception
    assertTrue(false);
}
function
java
99,854
// NewCrankEventQueueInstruction builds a CrankEventQueue instruction with
// every required account populated.  The parameters are the public keys of
// the accounts the instruction touches, applied in builder order.
func NewCrankEventQueueInstruction(
	state ag_solanago.PublicKey,
	zetaGroup ag_solanago.PublicKey,
	market ag_solanago.PublicKey,
	eventQueue ag_solanago.PublicKey,
	dexProgram ag_solanago.PublicKey,
	serumAuthority ag_solanago.PublicKey) *CrankEventQueue {
	return NewCrankEventQueueInstructionBuilder().
		SetStateAccount(state).
		SetZetaGroupAccount(zetaGroup).
		SetMarketAccount(market).
		SetEventQueueAccount(eventQueue).
		SetDexProgramAccount(dexProgram).
		SetSerumAuthorityAccount(serumAuthority)
}
function
go
99,855
/**
 * Handles a failed CMP (consent management platform) lookup: cancels the
 * pending timeout, clears the stored consent data when the module is
 * configured to let the auction proceed anyway, and exits with the error.
 *
 * @param {string} errMsg reason for the failure
 * @param {Object} hookConfig per-invocation hook state (owns the timer)
 * @param {*} [extraArgs] forwarded to exitModule
 */
function cmpFailed(errMsg, hookConfig, extraArgs) {
  clearTimeout(hookConfig.timer);
  // allowAuction is module-level state: when set, reset consent so the
  // auction can run without it.
  if (allowAuction) {
    storeConsentData(undefined);
  }
  exitModule(errMsg, hookConfig, extraArgs);
}
function
javascript
99,856
/*
** Allocate a new MergeEngine capable of merging the output of nReader
** PMA readers.  The reader count is rounded up to the next power of two
** (N) as required by the binary comparison tree; the PmaReader array and
** the aTree array are carved out of a single allocation trailing the
** MergeEngine struct itself.  Returns NULL on OOM (or simulated fault).
*/
static MergeEngine *vdbeMergeEngineNew(int nReader){
  int N = 2;              /* smallest power of two >= nReader */
  int nByte;              /* total bytes for struct + trailing arrays */
  MergeEngine *pNew;
  assert( nReader<=SORTER_MAX_MERGE_COUNT );
  while( N<nReader ) N += N;
  nByte = sizeof(MergeEngine) + N * (sizeof(int) + sizeof(PmaReader));
  /* sqlite3FaultSim(100) lets the test harness force an OOM here */
  pNew = sqlite3FaultSim(100) ? 0 : (MergeEngine*)sqlite3MallocZero(nByte);
  if( pNew ){
    pNew->nTree = N;
    pNew->pTask = 0;
    pNew->aReadr = (PmaReader*)&pNew[1];
    pNew->aTree = (int*)&pNew->aReadr[N];
  }
  return pNew;
}
function
c
99,857
def download(self, target_path, torrent_path = None, info_hash = None, close = False):
    """Start downloading a torrent into ``target_path`` and return its task.

    The torrent may be identified either by a .torrent file path or by an
    info hash.  When ``close`` is true, the task's owner is closed once the
    download completes.
    """
    def on_complete(task):
        # drop the finished task; optionally tear down its owner afterwards
        owner = task.owner
        self.remove_task(task)
        if close:
            owner.close()
    task = TorrentTask(
        self,
        target_path,
        torrent_path = torrent_path,
        info_hash = info_hash
    )
    task.load()
    task.connect_peers()
    task.bind("complete", on_complete)
    self.tasks.append(task)
    return task
function
python
99,858
/*
** Reports whether the WiFi MAC can accept a new TX frame.  Returns FALSE
** while a raw-RX management operation is in progress (the WF state machine
** is pumped instead) or when no raw TX data buffer could be allocated;
** otherwise TRUE.
*/
BOOL MACIsTxReady(void)
{
    BOOL result = TRUE;
    if (isRawRxMgmtInProgress())
    {
        /* keep the management exchange moving before reporting not-ready */
        WFProcess();
        return FALSE;
    }
    if ( !RawWindowReady[RAW_TX_ID] )
    {
        /* TX window not mounted: invalidate any ENC read/write pointer
        ** that was tracking it, then try to allocate a fresh TX buffer. */
        SetRawWindowState(RAW_TX_ID, WF_RAW_UNMOUNTED);
        if ( g_encPtrRAWId[ENC_RD_PTR_ID] == RAW_TX_ID )
        {
            g_encPtrRAWId[ENC_RD_PTR_ID] = RAW_INVALID_ID;
        }
        if ( g_encPtrRAWId[ENC_WT_PTR_ID] == RAW_TX_ID )
        {
            g_encPtrRAWId[ENC_WT_PTR_ID] = RAW_INVALID_ID;
        }
        if (!AllocateDataTxBuffer(MCHP_DATA_PACKET_SIZE) )
        {
            result = FALSE;
        }
    }
    return result;
}
function
c
99,859
class Account:
    """A row of the "accounts" SQLite database.

    Expects a sequence of values matching the columns of the "accounts"
    table, in column order.
    """

    def __init__(self, account_info) -> None:
        # Values for one row of the "accounts" table, in column order.
        self.account_info = account_info

    def create(self):
        """Insert this account as a new row into databases/accounts.db.

        Fixes over the original: the SQL had an invalid stray "*" in
        "INSERT INTO accounts * VALUES", the insert was never committed,
        and the connection leaked on error.  The placeholder count is now
        derived from the data instead of being hard-coded to 8.
        """
        path = os.path.join('databases', 'accounts.db')
        # One "?" placeholder per value; parameterized to avoid SQL injection.
        placeholders = ','.join('?' * len(self.account_info))
        query = f"INSERT INTO accounts VALUES ({placeholders});"
        connection = sqlite3.connect(path)
        try:
            # The connection context manager commits on success and rolls
            # back on error.
            with connection:
                connection.execute(query, self.account_info)
        finally:
            connection.close()

    def modify(self):
        # Not implemented yet.
        pass

    def update_balance(self, amount):
        # Not implemented yet.
        pass
class
python
99,860
// flattenHostAddrs expands a (possibly ranged) host pattern string into a
// sorted list of "host:port" addresses, applying defaultPort to entries
// that carry no explicit port.
func flattenHostAddrs(addrPatterns string, defaultPort int) (addrs []string, err error) {
	// group the hosts by port so each hostlist range expands only once
	var portHosts hostlist.HostGroups
	portHosts, err = hostsByPort(addrPatterns, defaultPort)
	if err != nil {
		return
	}
	for _, port := range portHosts.Keys() {
		// DerangedString() yields the expanded, comma-separated host names
		hosts := strings.Split(portHosts[port].DerangedString(), ",")
		for _, host := range hosts {
			addrs = append(addrs, fmt.Sprintf("%s:%s", host, port))
		}
	}
	sort.Strings(addrs)
	return
}
function
go
99,861
def _create_flip(context, flip, port_fixed_ips):
    """Associate a floating IP with ports/fixed IPs and register it with the
    floating-IP driver.

    The DB association and the driver registration run inside one session
    transaction, so a driver failure rolls the association back.  A billing
    notification is always emitted, even when no ports were given.

    :param context: request context owning the DB session
    :param flip: floating IP DB object
    :param port_fixed_ips: mapping port_id -> {'port': ..., 'fixed_ip': ...}
    """
    if port_fixed_ips:
        context.session.begin()
        try:
            ports = [val['port'] for val in port_fixed_ips.values()]
            flip = db_api.port_associate_ip(context, ports, flip, port_fixed_ips.keys())
            for port_id in port_fixed_ips:
                fixed_ip = port_fixed_ips[port_id]['fixed_ip']
                flip = db_api.floating_ip_associate_fixed_ip(context, flip, fixed_ip)
            flip_driver = registry.DRIVER_REGISTRY.get_driver()
            # driver call happens before commit so a failure undoes the DB work
            flip_driver.register_floating_ip(flip, port_fixed_ips)
            context.session.commit()
        except Exception:
            context.session.rollback()
            raise
    billing.notify(context, billing.IP_ASSOC, flip)
function
python
99,862
/**
 * Builds the tile map for a level from a color-coded bitmap: each pixel's
 * six-digit RRGGBB hex code selects the entity ID spawned at that cell.
 * Unknown colors map to {@code ID.Empty}.
 *
 * Cleanups over the original: removed the unused {@code blockSize} local,
 * dropped the pointless {@code new String()} initialization, and scoped
 * {@code color} inside the loop.
 *
 * @param image level bitmap, one pixel per tile
 * @return a width-by-height grid of entity IDs
 */
public static ID[][] loadLevel(BufferedImage image) {
    int w = image.getWidth();
    int h = image.getHeight();
    ID[][] gameMap = new ID[w][h];
    for (int x = 0; x < w; x++) {
        for (int y = 0; y < h; y++) {
            // toHex() yields the RRGGBB code for this pixel
            String color = toHex(image.getRGB(x, y));
            switch (color) {
                case "440000":
                    gameMap[x][y] = ID.SpawnPlayer;
                    break;
                case "FFFFFF":
                    gameMap[x][y] = ID.Wall;
                    break;
                case "FF0000":
                    gameMap[x][y] = ID.AlienSpawner;
                    break;
                case "FF0044":
                    gameMap[x][y] = ID.AlienSpawnerBig;
                    break;
                case "EE2222":
                    gameMap[x][y] = ID.AlienGreen;
                    break;
                case "EE2244":
                    gameMap[x][y] = ID.AlienGreenKey;
                    break;
                case "EE4444":
                    gameMap[x][y] = ID.AlienPurple;
                    break;
                case "EE6666":
                    gameMap[x][y] = ID.AlienExplode;
                    break;
                case "77FFFF":
                    gameMap[x][y] = ID.HealingItemSmall;
                    break;
                case "770000":
                    gameMap[x][y] = ID.HealingItemBig;
                    break;
                default:
                    gameMap[x][y] = ID.Empty;
                    break;
            }
        }
    }
    return gameMap;
}
function
java
99,863
/**
 * Replaces every occurrence of {@code target} in {@code string} with
 * {@code desired}.  A {@code null} input yields the empty string, matching
 * the original StringBuffer-based implementation.
 *
 * @param target  character to replace
 * @param desired replacement character
 * @param string  source text; may be {@code null}
 * @return the substituted string, never {@code null}
 */
public static final String charSubst( final char target, final char desired, final String string ) {
    // String.replace(char, char) substitutes all occurrences in one pass,
    // replacing the manual reverse StringBuffer scan.
    return ( string == null ) ? "" : string.replace( target, desired );
}
function
java
99,864
/*
 * Per-tick update for the actor-factory sequence action.  After the spawn
 * delay elapses, collects unblocked "Spawn Point" actors, optionally
 * shuffles them, and tries each in turn until the factory spawns an actor;
 * the spawn is then written to the "Spawned" variables and all output links
 * are pulsed.  Returns TRUE (finished) once SpawnCount actors were spawned,
 * or immediately when no factory is set.
 */
UBOOL USeqAct_ActorFactory::UpdateOp(FLOAT deltaTime)
{
    if (Factory != NULL)
    {
        if (RemainingDelay <= 0.f)
        {
            // gather candidate spawn points that won't block the new actor
            TArray<UObject**> objVars;
            GetObjectVars(objVars,TEXT("Spawn Point"));
            TArray<AActor*> spawnPoints;
            for (INT idx = 0; idx < objVars.Num(); idx++)
            {
                AActor *testPoint = Cast<AActor>(*(objVars(idx)));
                if (testPoint != NULL && !testPoint->bBlockActors)
                {
                    spawnPoints.AddUniqueItem(testPoint);
                }
            }
            switch (PointSelection)
            {
                case PS_Random:
                    // Fisher-Yates style in-place shuffle of the points
                    for (INT idx = 0; idx < spawnPoints.Num(); idx++)
                    {
                        INT newIdx = idx + (appRand()%(spawnPoints.Num()-idx));
                        spawnPoints.SwapItems(newIdx,idx);
                    }
                    break;
                case PS_Normal:
                    break;
                default:
                    break;
            }
            // try points until one yields a successful spawn
            AActor *newSpawn = NULL;
            while (spawnPoints.Num() > 0 && newSpawn == NULL)
            {
                AActor *point = spawnPoints.Pop();
                newSpawn = Factory->CreateActor(GetLevel(),&(point->Location),&(point->Rotation));
                if (newSpawn != NULL)
                {
                    ScriptLog(FString::Printf(TEXT("Spawned %s at %s"),newSpawn->GetName(),point->GetName()));
                    // expose the new actor through the "Spawned" variables
                    objVars.Empty();
                    GetObjectVars(objVars,TEXT("Spawned"));
                    for (INT idx = 0; idx < objVars.Num(); idx++)
                    {
                        *(objVars(idx)) = newSpawn;
                    }
                    SpawnedCount++;
                    // pulse every output link for this spawn
                    for (INT linkIdx = 0; linkIdx < OutputLinks.Num(); linkIdx++)
                    {
                        OutputLinks(linkIdx).bHasImpulse = 1;
                    }
                }
            }
            RemainingDelay = SpawnDelay;
        }
        else
        {
            RemainingDelay -= deltaTime;
        }
        return (SpawnedCount >= SpawnCount);
    }
    else
    {
        debugf(NAME_Warning,TEXT("Actor factory action %s has an invalid factory!"),GetFullName());
        return 1;
    }
}
function
c++
99,865
/**
 * Writes the response body to a per-session dump file, records its name in
 * the session value log, and then fails the test with the given message.
 *
 * Note: Assert.fail throws AssertionError, which is an Error and therefore
 * passes through the {@code catch (Exception)} below — the failure is not
 * swallowed by the dump error handling.
 *
 * @param response the response to dump; a null response only logs an error
 * @param message  the failure message
 */
public static void dumpResponseContentAndFail(final WebResponse response, final String message)
{
    if (response != null)
    {
        final SessionImpl session = (SessionImpl) Session.getCurrent();
        final File resultsDirectory = new File(session.getResultsDirectory(), XltConstants.DUMP_OUTPUT_DIR);
        final File dumpDirectory = new File(resultsDirectory, session.getID());
        final File responseDirectory = new File(dumpDirectory, XltConstants.DUMP_RESPONSES_DIR);
        responseDirectory.mkdirs();
        final String requestId = getId(response);
        final File dumpFile = new File(responseDirectory, requestId + ".txt");
        // try-with-resources closes both streams even when Assert.fail throws
        try (final InputStream inputStream = response.getContentAsStream(); FileOutputStream outputStream = new FileOutputStream(dumpFile))
        {
            IOUtils.copy(inputStream, outputStream);
            Session.getCurrent().getValueLog().put("Defective response", dumpFile.getName());
            Assert.fail(message + " -> check custom dump file (referenced by ID in result browser log)");
        }
        catch (final Exception e)
        {
            XltLogger.runTimeLogger.error("Cannot write response content dump to file: " + dumpFile.getAbsolutePath(), e);
        }
    }
    else
    {
        XltLogger.runTimeLogger.error("Cannot dump from NULL response");
    }
}
function
java
99,866
/// <summary>
/// Depth-first search for a component of type T on the element under the
/// mouse.  Children are searched before their parent, so the most deeply
/// nested (most specific) match wins.
/// </summary>
/// <param name="container">Elements to search.</param>
/// <param name="ignoreElement">Element excluded from hit-testing
/// (exact semantics come from isMouseOverable, not visible here).</param>
/// <returns>The component found, or null when nothing under the mouse has one.</returns>
public T FindMousedOverWithComponent<T>(IEnumerable<Element> container, Element ignoreElement = null)
    where T : Component
{
    foreach (Element element in container)
    {
        if (isMouseOverable(element, ignoreElement) && element.Bounds.AbsoluteContains(MousePosition))
        {
            T component;
            if (element.HasChildren)
            {
                // recurse first: a match in a child is more specific
                component = FindMousedOverWithComponent<T>(element, ignoreElement);
                if (component != null)
                    return component;
            }
            if (element.TryGetComponent(out component))
                return component;
        }
    }
    return null;
}
function
c#
99,867
/**
 * Returns true for a package fragment that exists, contains neither Java
 * children nor non-Java resources, but still has subpackages — i.e. a
 * purely structural "empty" package node.
 *
 * NOTE(review): despite the name, a childless fragment WITHOUT subpackages
 * reports false here; confirm that is intended before reusing this as a
 * general emptiness test.
 *
 * @param element the element to test; non-fragments always yield false
 * @throws JavaModelException if the underlying model access fails
 */
protected boolean isPackageFragmentEmpty(IJavaElement element) throws JavaModelException {
    if (element instanceof IPackageFragment) {
        IPackageFragment fragment = (IPackageFragment) element;
        if (fragment.exists() && !(fragment.hasChildren() || fragment.getNonJavaResources().length > 0) && fragment.hasSubpackages())
            return true;
    }
    return false;
}
function
java
99,868
/**
 * Inserts the key/value pair into the map only when the map, the key, and
 * the value are all non-null; otherwise does nothing.
 */
static void putIfNotNull(final Map<String, Object> map, final String key, final Object value) {
    // a null anywhere means "nothing to add"
    if (map == null || key == null || value == null) {
        return;
    }
    map.put(key, value);
}
function
java
99,869
def image_deployment(server, config):
    """Deploy an OS image onto one iLO-managed HPE Gen10 server.

    Builds a kickstart-customized ISO, mounts it over iLO virtual media,
    power-cycles the server, waits for the installation to finish, and
    cleans up the generated artifacts.  Returns True on success, False on
    any failure (all exceptions are caught and reported).
    """
    try:
        server_serial_number = server['Server_serial_number']
        os_type = config["OS_type"]
        image_path = config["HTTP_server_base_url"]+config["OS_image_name"]
        iso_file_check = is_iso_file_present(image_path)
        if not iso_file_check:
            print("ISO image not preset in the specified location")
            return False
        redfish_obj = create_redfish_object(server)
        if not redfish_obj:
            print("Error occured while creating redfish object for server {}".format(server_serial_number))
            return False
        server_model = get_server_model(redfish_obj)
        if not server_model:
            print("Failed to get server model")
            return False
        if "Gen10" not in server_model:
            print("Server with serial number {} is not supported for this solution".format(server['Server_serial_number']))
            return False
        if os_type == "rhel7":
            custom_iso_created = create_custom_iso_image_redhat(os_type, server, config, config['base_kickstart_filepath'])
        else:
            # FIXME(review): for unsupported OS types this only prints and
            # falls through, leaving custom_iso_created unbound — the code
            # below then raises NameError.  A `return False` is likely
            # missing here.
            print("Unsupported OS type. Supported OS type is rhel7")
        print("Starting OS installation for server: " + server_serial_number)
        custom_image_path = get_custom_image_path(config["HTTP_file_path"], os_type, server_serial_number)
        custom_image_url = get_custom_image_url(config["HTTP_server_base_url"], os_type, server_serial_number)
        custom_kickstart_path = get_custom_kickstart_path(config["HTTP_file_path"], os_type, server_serial_number)
        custom_iso_present = is_iso_file_present(custom_image_url)
        if(custom_iso_created and custom_iso_present):
            # remount virtual media with the freshly built custom ISO
            unmount_virtual_media_iso(redfish_obj)
            mount_virtual_media_iso(redfish_obj, custom_image_url, True)
            power_staus = get_post_state(redfish_obj)
            if power_staus == "PowerOff":
                change_server_power_state(redfish_obj, server_serial_number, power_state="On")
            else:
                change_server_power_state(redfish_obj, server_serial_number, power_state="ForceRestart")
            is_complete = wait_for_os_deployment_to_complete(redfish_obj, server['Server_serial_number'])
            unmount_virtual_media_iso(redfish_obj)
            print("Deleting custom image for server {}".format(server_serial_number))
            delete_file(custom_image_path)
            print("Deleting custom kickstart file for server {}".format(server_serial_number))
            delete_file(custom_kickstart_path)
            print("Logging out of iLO for server {}".format(server_serial_number))
            redfish_obj.redfish_client.logout()
            if is_complete:
                print("OS installation is complete for server {}".format(server_serial_number))
                return True
            else:
                print("OS installation failed on server {}".format(server_serial_number))
                return False
        else:
            print("Error in fetching custom image for server {}".format(server_serial_number))
            return False
    except Exception as e:
        print("Failure: Error occurred while deploying image on server {}".format(e))
        return False
function
python
99,870
// Registers the config object types this collector consumes; updates for
// any other object type are filtered out.
// Fix: the original registered "global_system_config" twice; the redundant
// duplicate call has been removed.
void ConfigJsonParserCollector::SetupObjectFilter() {
    AddObjectType("global_system_config");
    AddObjectType("structured_syslog_sla_profile");
    AddObjectType("structured_syslog_application_record");
    AddObjectType("structured_syslog_hostname_record");
    AddObjectType("structured_syslog_tenant_record");
    AddObjectType("structured_syslog_config");
    AddObjectType("project");
    AddObjectType("global_analytics_config");
    AddObjectType("structured_syslog_message");
}
function
c++
99,871
/*
** Transfers a received DCAN message object from message RAM into the given
** interface register set and copies its data bytes into 'data'.  The
** transfer also clears the TX request and pending-interrupt flags, and the
** object's new-data flag is cleared afterwards.
**
** baseAdd - DCAN module base address
** msgNum  - message object number to read
** data    - destination buffer for the message data words
** ifReg   - interface register set (IF1/IF2) used for the transfer
*/
void CANReadMsgObjData(unsigned int baseAdd, unsigned int msgNum,
                       unsigned int* data, unsigned int ifReg)
{
    /* Request data A/B, arbitration, control and mask bits from message
    ** RAM while clearing TXRQST and the pending interrupt. */
    DCANCommandRegSet(baseAdd, (DCAN_DAT_A_ACCESS | DCAN_DAT_B_ACCESS |
                                DCAN_TXRQST_ACCESS | DCAN_CLR_INTPND |
                                DCAN_ACCESS_CTL_BITS | DCAN_ACCESS_ARB_BITS |
                                DCAN_ACCESS_MSK_BITS | DCAN_MSG_READ),
                      msgNum, ifReg);
    DCANNewDataControl(baseAdd, DCAN_NEW_DAT_CLR, ifReg);
    DCANDataRead(baseAdd, data, ifReg);
}
function
c
99,872
/**
 * Scans forward through the XML stream and returns the first TEXT node
 * encountered, or null when the name tag closes (or the document ends)
 * without any text.
 *
 * NOTE(review): any TEXT event is returned, even one occurring inside a
 * different child element before NAME_TAG closes — confirm the parsed
 * documents cannot contain such text.
 */
@Nullable
private static String parseName(XmlResourceParser parser) throws IOException, XmlPullParserException {
    while (parser.next() != XmlPullParser.END_DOCUMENT) {
        int tagType = parser.getEventType();
        String tagName = parser.getName();
        if (tagType == XmlPullParser.TEXT) {
            return parser.getText();
        }
        // stop once the enclosing name element is closed
        if (tagType == XmlPullParser.END_TAG && NAME_TAG.equals(tagName)) {
            break;
        }
    }
    return null;
}
function
java
99,873
def _send_multi_stage_continue_headers(self, request, use_multiphase_commit, mime_documents_iter, **kwargs):
    """For a multiphase commit, send "100 Continue" on the request's input
    and wait for the commit confirmation document; raise 500 when the
    confirmation never arrives.  No-op otherwise.
    """
    if use_multiphase_commit:
        request.environ['wsgi.input'].\
            send_hundred_continue_response()
        if not self._read_put_commit_message(mime_documents_iter):
            raise HTTPServerError(request=request)
function
python
99,874
/// Looks up a configuration property of this output plugin instance by key.
///
/// Returns `Ok(Some(value))` when the property exists, `Ok(None)` when it
/// is absent or the plugin API exposes no `output_get_property` callback,
/// and `Err(())` when the key contains an interior NUL or the returned
/// value is not valid UTF-8.
///
/// Safety: dereferences raw plugin pointers handed over by the host;
/// `plugin_ptr` must point at a live `flbgo_output_plugin`.
pub fn config_param<K: AsRef<str>>(&self, key: K) -> Result<Option<String>, ()> {
    unsafe {
        let p = self.plugin_ptr as *mut flbgo_output_plugin;
        // NUL-terminate the key for the C API; fails on interior NULs
        let key = CString::new(key.as_ref()).map_err(|_| ())?;
        let param = if let Some(f) = (*(*p).api).output_get_property {
            f(key.as_ptr() as _, (*p).o_ins as _)
        } else {
            return Ok(None);
        };
        if !param.is_null() {
            // copy the C string out so nothing borrows the FFI buffer
            let result = CStr::from_ptr(param).to_str().map_err(|_| ())?;
            return Ok(Some(result.to_owned()));
        }
        Ok(None)
    }
}
function
rust
99,875
/**
 * Loads column metadata for the given in-memory view from the database
 * catalog and adds each column to the view.  The ResultSet is always
 * closed, even on failure.
 *
 * @param v the view to populate
 * @throws SQLException if the metadata query fails
 */
protected void populateView(InMemoryView v) throws SQLException {
    ResultSet rs = null;
    try {
        rs = dbMeta.getColumns(config.getDbCatalog(), config.getDbSchema(), v.getName(), null);
        while (rs.next()) {
            addColumn(v, rs);
        }
    } finally {
        DBUtil.close(rs, log);
    }
}
function
java
99,876
/**
 * Planner rule that flattens a conjunction nested inside a logical filter:
 * a filter whose predicates contain AND(p1..pn) together with other
 * predicates q1..qm is rewritten into one flat filter over
 * (p1..pn, q1..qm) on the same quantifier.
 */
@API(API.Status.EXPERIMENTAL)
public class FlattenNestedAndPredicateRule extends PlannerRule<LogicalFilterExpression> {
    private static final BindingMatcher<QueryPredicate> nestedPredicateMatcher = anyPredicate();
    private static final BindingMatcher<QueryPredicate> otherPredicateMatcher = anyPredicate();
    private static final BindingMatcher<Quantifier.ForEach> innerQuantifierMatcher = forEachQuantifier();
    // matches a logical filter containing exactly one AND(...) plus any
    // number of other predicates
    private static final BindingMatcher<LogicalFilterExpression> root =
            RelationalExpressionMatchers.logicalFilterExpression(
                    MatchOneAndRestMatcher.matchOneAndRest(
                            QueryPredicateMatchers.andPredicate(all(nestedPredicateMatcher)),
                            all(otherPredicateMatcher)),
                    all(innerQuantifierMatcher));

    public FlattenNestedAndPredicateRule() {
        super(root);
    }

    @Override
    public void onMatch(@Nonnull PlannerRuleCall call) {
        final PlannerBindings bindings = call.getBindings();
        final List<? extends QueryPredicate> innerAndChildren = bindings.getAll(nestedPredicateMatcher);
        final List<? extends QueryPredicate> otherOuterAndChildren = bindings.getAll(otherPredicateMatcher);
        final Quantifier.ForEach innerQuantifier = call.get(innerQuantifierMatcher);
        // concatenate the AND's children with the remaining conjuncts and
        // re-yield a single, flat filter over the same quantifier
        List<QueryPredicate> allConjuncts = new ArrayList<>(innerAndChildren);
        allConjuncts.addAll(otherOuterAndChildren);
        call.yield(call.ref(new LogicalFilterExpression(allConjuncts, innerQuantifier)));
    }
}
class
java
99,877
def _add_sentinel(self,
                  query: torch.FloatTensor,
                  key: torch.FloatTensor,
                  mask: torch.ByteTensor) -> Tuple:
    """Prepend the learned sentinel vector to the attention keys and extend
    the mask so the sentinel position is always attendable.

    Assumes query is (batch, query_length, dim) and key, when given, is
    (batch, key_length, key_dim) with mask (batch, query_length,
    key_length) — TODO confirm against callers.

    Returns (new_keys, new_mask); new_mask is None when mask is None and
    key is not None.
    """
    batch_size, query_length, _ = query.size()
    if key is None:
        # no keys at all: the sentinel becomes the only (always visible) key
        new_keys = self.sentinel.expand([batch_size, 1, self._key_vector_dim])
        new_mask = self.sentinel.new_ones(batch_size, query_length, 1)
        return new_keys, new_mask
    sentinel = self.sentinel.expand([batch_size, 1, self._key_vector_dim])
    new_keys = torch.cat([sentinel, key], dim=1)
    new_mask = None
    if mask is not None:
        # the prepended sentinel position is never masked out
        sentinel_mask = mask.new_ones(batch_size, query_length, 1)
        new_mask = torch.cat([sentinel_mask, mask], dim=2)
    return new_keys, new_mask
function
python
99,878
/**
 * Answer the map's values as a tuple.  Each value is made immutable before it
 * is written into the generated tuple.
 */
@Override
@AvailMethod
A_Tuple o_ValuesAsTuple (final AvailObject object)
{
    final int size = object.mapSize();
    final MapIterable mapIterable = object.mapIterable();
    // NOTE(review): relies on the generator invoking the index function exactly once
    // per slot in ascending order -- confirm generateObjectTupleFrom guarantees this,
    // since each call advances the shared iterator.
    return generateObjectTupleFrom(
        size, index -> mapIterable.next().value().makeImmutable());
}
function
java
99,879
def xy_czt(unit,step,q_points,q_mins,q_max):
    """Apply a 2-D zoom FFT (chirp z-transform) to ``unit`` over axes 0 and 1.

    ``q_points``, ``q_mins`` and ``q_max`` give, per axis, the number of output
    points and the q-window to evaluate.  Returns the transformed array.
    # (axis 2, if present, is left untransformed)
    """
    # Maximum representable q per axis from the real-space sampling step.
    # assumes step is an array-like with one entry per axis -- TODO confirm
    max_qxyz = (2*pi)/step
    # Transform axis 0 first ...
    intermediate = czt.zoomfft(unit,q_mins[0],q_max[0],q_points[0], max_qxyz[0],axis = 0)
    # ... then axis 1 of the intermediate result.
    xy_czt_result = czt.zoomfft(intermediate,q_mins[1],q_max[1], q_points[1],max_qxyz[1],axis=1)
    return xy_czt_result
function
python
99,880
// AutoScaleReadCapacity enables read-capacity auto scaling on the table by
// invoking the underlying jsii runtime method "autoScaleReadCapacity" with the
// given scaling props, and returns the resulting scalable attribute handle.
func (t *jsiiProxy_Table) AutoScaleReadCapacity(props *EnableScalingProps) IScalableTableAttribute {
	var returns IScalableTableAttribute

	_jsii_.Invoke(
		t,
		"autoScaleReadCapacity",
		[]interface{}{props},
		&returns,
	)

	return returns
}
function
go
99,881
/**
 * Pending-transaction sorter that orders the pool by (received-from-local,
 * gas price, sequence), highest first.  Intended for chains priced purely by
 * gas price (no base fee).
 */
public class GasPricePendingTransactionsSorter extends AbstractPendingTransactionsSorter {

  // Ordered view of the pending pool; iteration order is the selection priority,
  // and last() is the lowest-priority entry (used for eviction).
  private final NavigableSet<TransactionInfo> prioritizedTransactions =
      new TreeSet<>(
          comparing(TransactionInfo::isReceivedFromLocalSource)
              .thenComparing(TransactionInfo::getGasPrice)
              .thenComparing(TransactionInfo::getSequence)
              .reversed());

  public GasPricePendingTransactionsSorter(
      final int maxTransactionRetentionHours,
      final int maxPendingTransactions,
      final Clock clock,
      final MetricsSystem metricsSystem,
      final Supplier<BlockHeader> chainHeadHeaderSupplier,
      final Percentage priceBump) {
    super(
        maxTransactionRetentionHours,
        maxPendingTransactions,
        clock,
        metricsSystem,
        chainHeadHeaderSupplier,
        priceBump);
  }

  @Override
  public void manageBlockAdded(final Block block) {
    // nothing to do
  }

  @Override
  protected void doRemoveTransaction(final Transaction transaction, final boolean addedToBlock) {
    synchronized (lock) {
      final TransactionInfo removedTransactionInfo =
          pendingTransactions.remove(transaction.getHash());
      if (removedTransactionInfo != null) {
        // Keep the priority set and the sender/nonce index consistent with the main map.
        prioritizedTransactions.remove(removedTransactionInfo);
        removeTransactionTrackedBySenderAndNonce(transaction);
        incrementTransactionRemovedCounter(
            removedTransactionInfo.isReceivedFromLocalSource(), addedToBlock);
      }
    }
  }

  @Override
  protected Iterator<TransactionInfo> prioritizedTransactions() {
    return prioritizedTransactions.iterator();
  }

  @Override
  protected TransactionAddedStatus addTransaction(final TransactionInfo transactionInfo) {
    Optional<Transaction> droppedTransaction = Optional.empty();
    synchronized (lock) {
      if (pendingTransactions.containsKey(transactionInfo.getHash())) {
        return TransactionAddedStatus.ALREADY_KNOWN;
      }
      final TransactionAddedStatus transactionAddedStatus =
          addTransactionForSenderAndNonce(transactionInfo);
      if (!transactionAddedStatus.equals(TransactionAddedStatus.ADDED)) {
        return transactionAddedStatus;
      }
      prioritizedTransactions.add(transactionInfo);
      pendingTransactions.put(transactionInfo.getHash(), transactionInfo);
      // Evict the lowest-priority entry when the pool exceeds its capacity.
      if (pendingTransactions.size() > maxPendingTransactions) {
        final TransactionInfo toRemove = prioritizedTransactions.last();
        doRemoveTransaction(toRemove.getTransaction(), false);
        droppedTransaction = Optional.of(toRemove.getTransaction());
      }
    }
    // Listener notifications happen outside the lock.
    notifyTransactionAdded(transactionInfo.getTransaction());
    droppedTransaction.ifPresent(this::notifyTransactionDropped);
    return TransactionAddedStatus.ADDED;
  }
}
class
java
99,882
def gradient_descent(
    bin_path: str,
    save_path: str,
    epsilon: float,
    update_nr: int,
    dt: float,
    sigmas: [float],
    st_obs_w: obspy.Stream,
    current_update: int = 0,
    prior_crfl_filepath: str = None,
    alphas: [float] = [1e-6, 1e-5, 5e-5, 1e-4, 5e-4, 1e-3, 1e-2, 1e-1],
    fmin: float = None,
    fmax: float = None,
    phases: [str] = ["P", "S", "P", "S", "S"],
    comps: [str] = ["Z", "T", "R", "Z", "R"],
    t_pres: [int] = [1, 1, 1, 1, 1],
    t_posts: [int] = [30, 30, 30, 30, 30],
):
    """Run ``update_nr`` gradient-descent source-inversion updates.

    Each update computes a finite-difference misfit gradient, line-searches over
    ``alphas`` for the best step size, and saves the accepted model, misfit and
    synthetics under ``save_path/Update_<n>/``.  When ``current_update`` > 0 the
    run resumes from the previous update's saved model; intermediate .npy files
    are reused when they already exist on disk.

    Bug fixed vs. the original: when a cached ``dxi_dms.npy`` was reloaded,
    ``dxi_dm`` was never assigned, so the line search below crashed with a
    NameError.  It is now restored from the loaded array.
    """
    save_path_OG = save_path
    if current_update != 0:
        # Resume: find the latest iteration folder of the previous update and
        # take its crfl.dat and saved model m1_* as the starting point.
        prev_update = current_update - 1
        prev_it = max(
            [
                int(f.strip("It_"))
                for f in listdir(join(save_path_OG, f"Update_{prev_update}"))
                if f.startswith("It_")
            ]
        )
        prior_crfl_filepath = join(
            save_path_OG, f"Update_{prev_update}", f"It_{prev_it}", "crfl.dat"
        )
        prev_m0 = [
            f
            for f in listdir(join(save_path, f"Update_{prev_update}"))
            if f.startswith("m1_")
            if isfile(join(save_path, f"Update_{prev_update}", f))
        ][0]
        m0 = _np.load(join(save_path_OG, f"Update_{prev_update}", prev_m0,))
    else:
        # Fresh start: parse the starting model out of the prior crfl.dat.
        # NOTE(review): line offsets (-8 for the focal mechanism, 9 for depth)
        # encode the crfl.dat layout -- confirm against the file format.
        with open(prior_crfl_filepath, "r") as f:
            data = f.readlines()
        fm = _np.array(data[-8].split(), dtype=float)
        m0 = _np.array(
            [fm[5], fm[0], fm[3], fm[2], -fm[4] + 0, -fm[1] + 0, float(data[9].split()[0])]
        )

    while current_update < update_nr:
        if not exists(join(save_path_OG, f"Update_{current_update}")):
            makedirs(join(save_path_OG, f"Update_{current_update}"))
        save_path = join(save_path_OG, f"Update_{current_update}")

        src_str = _Gradient.SRC_STR(
            binary_file_path=bin_path,
            prior_dat_filepath=prior_crfl_filepath,
            save_folder=save_path,
            phases=phases,
            components=comps,
            t_pres=t_pres,
            t_posts=t_posts,
            depth=True,
            vpvs=False,
            fmin=fmin,
            fmax=fmax,
            dt=dt,
            sigmas=sigmas,
            zerophase=False,
            start_it=0,
        )

        dxi_dms = _np.zeros((len(m0), 1))
        if not isfile(join(save_path, "dxi_dms.npy")):
            # Finite-difference gradient of the misfit; the first six parameters
            # share a step scaled by epsilon, the depth (last entry) uses 10%.
            dxi_dm = _af(
                m0,
                src_str.misfit,
                epsilon
                * _np.array(
                    [
                        _np.mean(m0[:-1]),
                        _np.mean(m0[:-1]),
                        _np.mean(m0[:-1]),
                        _np.mean(m0[:-1]),
                        _np.mean(m0[:-1]),
                        _np.mean(m0[:-1]),
                        0.1 * m0[-1],
                    ]
                ),
                st_obs_w,
            )
            dxi_dms[:, 0] = dxi_dm
            _np.save(join(save_path, "dxi_dms.npy"), dxi_dms)
        else:
            print("dxi_dms.npy already exists in this folder, reads in the existing file")
            dxi_dms = _np.load(join(save_path, "dxi_dms.npy"))
            # BUGFIX: restore the gradient vector from the cached file; the
            # original left dxi_dm undefined on this path, so the line search
            # below raised a NameError.
            dxi_dm = dxi_dms[:, 0]

        if not isfile(join(save_path, f"m1s_{epsilon}.npy")):
            # Line search: evaluate candidate models m0 - alpha * gradient.
            m1s = _np.zeros((len(m0), len(alphas)))
            X1s = _np.zeros(len(alphas))
            for i, alpha in enumerate(alphas):
                m1s[:, i] = m0 - dxi_dm * alpha
                X1s[i] = src_str.misfit(m1s[:, i], st_obs_w)
            _np.save(join(save_path, f"m1s_{epsilon}.npy"), m1s)
            _np.save(join(save_path, f"X1s_{epsilon}.npy"), X1s)
        else:
            m1s = _np.load(join(save_path, f"m1s_{epsilon}.npy"))
            X1s = _np.load(join(save_path, f"X1s_{epsilon}.npy"))

        # Accept the step size with the lowest misfit and persist results.
        min_misfit = X1s.argmin()
        min_alpha = alphas[min_misfit]
        m1 = m1s[:, min_misfit]
        _np.save(join(save_path, f"misfit.npy"), X1s.min())
        _np.save(join(save_path, f"alpha.npy"), min_alpha)
        _np.save(join(save_path, f"m1_eps_{epsilon}_alpha_{min_alpha}.npy"), m1)

        # Forward-model the accepted update and store the synthetic waveforms.
        st_m1 = src_str.forward(m1)
        st_m1.write(join(save_path, "st_m1.mseed"), format="MSEED")

        update_it = src_str.it - 1
        print(f"this is the iteration used for next update: {update_it}")
        prior_crfl_filepath = join(
            save_path_OG, f"Update_{current_update}", f"It_{update_it}", "crfl.dat"
        )
        current_update += 1
        m0 = m1
function
python
99,883
// rollingUpdate marks old (out-of-hash) storage pods for deletion so the
// controller can recreate them with the target hash, never letting the number
// of simultaneously unavailable pods exceed the allowed maximum.
func (c *Controller) rollingUpdate(cluster *corev1alpha1.StorageCluster, hash string) error {
	nodeToStoragePods, err := c.getNodeToStoragePods(cluster)
	if err != nil {
		return fmt.Errorf("couldn't get node to storage pod mapping for storage cluster %v: %v",
			cluster.Name, err)
	}

	// Pods whose hash differs from the target are "old" and need replacing.
	_, oldPods := c.getAllStorageClusterPods(cluster, nodeToStoragePods, hash)
	maxUnavailable, numUnavailable, err := c.getUnavailableNumbers(cluster, nodeToStoragePods)
	if err != nil {
		return fmt.Errorf("couldn't get unavailable numbers: %v", err)
	}
	oldAvailablePods, oldUnavailablePods := splitByAvailablePods(oldPods)

	// Already-unavailable old pods cost nothing against the budget, so delete them all.
	var oldPodsToDelete []string
	logrus.Debugf("Marking all unavailable old pods for deletion")
	for _, pod := range oldUnavailablePods {
		// Skip pods that are already terminating.
		if pod.DeletionTimestamp != nil {
			continue
		}
		logrus.Debugf("Marking pod %s/%s for deletion", cluster.Name, pod.Name)
		oldPodsToDelete = append(oldPodsToDelete, pod.Name)
	}

	// Delete available old pods only while staying under the unavailability budget.
	logrus.Debugf("Marking old pods for deletion")
	for _, pod := range oldAvailablePods {
		if numUnavailable >= maxUnavailable {
			logrus.Debugf("Number of unavailable StorageCluster pods: %d, is equal "+
				"to or exceeds allowed maximum: %d", numUnavailable, maxUnavailable)
			break
		}
		logrus.Debugf("Marking pod %s/%s for deletion", cluster.Name, pod.Name)
		oldPodsToDelete = append(oldPodsToDelete, pod.Name)
		numUnavailable++
	}
	return c.syncNodes(cluster, oldPodsToDelete, []string{}, hash)
}
function
go
99,884
/**
 * Topology implementation whose streams are backed by a function graph.
 * Source creation wraps the supplied functions in graph source oplets.
 *
 * @param <X> tester type associated with this topology
 */
public abstract class GraphTopology<X extends Tester> extends AbstractTopology<X> {

    protected GraphTopology(String name) {
        super(name);
    }

    /** Wrap a source oplet as a stream rooted at a new graph source vertex. */
    protected <N extends Source<T>, T> TStream<T> sourceStream(N sourceOp) {
        return new ConnectorStream<GraphTopology<X>, T>(this, graph().source(sourceOp));
    }

    @Override
    public <T> TStream<T> source(Supplier<Iterable<T>> data) {
        // Synchronize the supplier so graph execution can call it safely.
        data = Functions.synchronizedSupplier(data);
        return sourceStream(new SupplierSource<>(data));
    }

    @Override
    public <T> TStream<T> poll(Supplier<T> data, long period, TimeUnit unit) {
        data = Functions.synchronizedSupplier(data);
        return sourceStream(new SupplierPeriodicSource<>(period, unit, data));
    }

    @Override
    public <T> TStream<T> events(Consumer<Consumer<T>> eventSetup) {
        TStream<T> rawEvents = sourceStream(new Events<>(eventSetup));
        // Isolate so event callbacks are decoupled from downstream processing.
        return PlumbingStreams.isolate(rawEvents, true);
    }
}
class
java
99,885
_unwatchScripts (reset = false) { if (this._fsWatcher) { this._fsWatcher.close() this._fsWatcher = null if (reset) { this._watchScripts() } } }
function
javascript
99,886
def buildSyntheticSigData(path, cohort=239673, training=0.7, groups=None):
    """Load a cohort's synthetic signature data and split it into a training
    set and an out-of-sample set.

    ``groups`` may name diagnosis groups ("borderline", "healthy", "bipolar")
    to restrict which participants are included.  Returns
    ``(training_set, out_of_sample)`` lists of Participant objects.
    """
    label_for_group = {
        "borderline": -1,
        "healthy": 0,
        "bipolar": 1
    }
    if groups is not None:
        # Translate group names into their numeric diagnosis labels.
        groups = [label_for_group[name] for name in groups]

    prefix = os.path.join(path, f"cohort_{cohort}")
    signatures = np.genfromtxt(prefix + "_sigs.pickle", delimiter=',')
    diagnoses = np.genfromtxt(prefix + "_diagnosis.pickle", delimiter=',')

    participants = []
    for index, (diagnosis, signature) in enumerate(zip(diagnoses, signatures)):
        # Skip participants outside the requested diagnosis groups.
        if groups is not None and diagnosis not in groups:
            continue
        participants.append(Participant(signature, index, int(diagnosis + 1), None))

    # Shuffle before splitting so the split is random.
    random.shuffle(participants)
    split = int(training * len(participants))
    return participants[:split], participants[split:]
function
python
99,887
function bindCustomizerChanges(callback) { if (!isInCustomizer()) { return; } SETTINGS_TO_STATE_MAP.forEach((jsName, phpName) => { window.wp.customize(phpName, value => { value.bind(function (newValue) { const newOvelayOptions = { [jsName]: newValue }; window[_constants__WEBPACK_IMPORTED_MODULE_0__ .W1].showResults = true; window[_constants__WEBPACK_IMPORTED_MODULE_0__ .W1].overlayOptions = { ...window[_constants__WEBPACK_IMPORTED_MODULE_0__ .W1].overlayOptions, ...newOvelayOptions }; callback && callback(newOvelayOptions); }); }); }); }
function
javascript
99,888
/// <summary>
/// Adds or removes this application from the current user's startup (Run)
/// registry key.
/// </summary>
/// <param name="setStartup">true to register the executable for startup; false to unregister it.</param>
private void SetStartupRegistry(bool setStartup = false)
{
    Assembly curAssembly = Assembly.GetExecutingAssembly();
    // 'using' guarantees the key handle is disposed even if an update throws;
    // the original called Close() manually and leaked on exceptions.
    using (Microsoft.Win32.RegistryKey key =
        Microsoft.Win32.Registry.CurrentUser.OpenSubKey("SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Run", true))
    {
        // OpenSubKey returns null when the key cannot be opened; the original
        // code dereferenced it unconditionally (NullReferenceException).
        if (key == null)
        {
            Logger.Error("Failed to open startup registry key.");
            if (setStartup)
            {
                StartupToggle.IsChecked = false;
                MessageBox.Show("Failed to setup startup", Properties.Resources.txtLivelyErrorMsgTitle);
            }
            return;
        }

        if (setStartup)
        {
            try
            {
                // Register the executable path under the assembly name.
                key.SetValue(curAssembly.GetName().Name, curAssembly.Location);
            }
            catch (Exception ex)
            {
                StartupToggle.IsChecked = false;
                Logger.Error(ex.ToString());
                MessageBox.Show("Failed to setup startup", Properties.Resources.txtLivelyErrorMsgTitle);
            }
        }
        else
        {
            try
            {
                // Best-effort removal; a missing value is not an error
                // (throwOnMissingValue: false).
                key.DeleteValue(curAssembly.GetName().Name, false);
            }
            catch (Exception ex)
            {
                Logger.Error(ex.ToString());
            }
        }
    }
}
function
c#
99,889
/**
 * Serializable response to a chunked request, carrying one (possibly deflated)
 * chunk of payload bytes together with the request it answers.
 */
public class ChunkedResponse implements IChunkedResponse {

    // The request this response answers.
    protected IChunkedRequest reference;
    // Raw chunk bytes; compressed when {@link #deflated} is true.
    protected byte[] payload;
    // Whether the payload bytes are deflate-compressed.
    protected boolean deflated;
    // Payload size in bytes.
    // NOTE(review): whether this is the compressed or the uncompressed size is
    // not established here -- confirm against the producer of these objects.
    protected int payloadSize;

    /**
     * Needed for serialization
     */
    public ChunkedResponse() {
        super();
    }

    public ChunkedResponse(IChunkedRequest reference, byte[] payload, boolean deflated, int payloadSize) {
        super();
        this.reference = reference;
        this.payload = payload;
        this.deflated = deflated;
        this.payloadSize = payloadSize;
    }

    @Override
    public IChunkedRequest getReference() {
        return reference;
    }

    public void setReference(IChunkedRequest reference) {
        this.reference = reference;
    }

    @Override
    public byte[] getPayload() {
        return payload;
    }

    public void setPayload(byte[] payload) {
        this.payload = payload;
    }

    @Override
    public int getPayloadSize() {
        return payloadSize;
    }

    public void setPayloadSize(int payloadSize) {
        this.payloadSize = payloadSize;
    }

    @Override
    public boolean isDeflated() {
        return deflated;
    }

    public void setDeflated(boolean deflated) {
        this.deflated = deflated;
    }
}
class
java
99,890
def visualize(x, labels, to_show=6, num_col=3, predictions=None, test=False):
    """Plot pairs of images side by side with their labels (and predictions).

    ``x`` is a pair of image batches; for each index i the two images
    x[0][i] and x[1][i] are concatenated horizontally.  In test mode the true
    label and predicted score are shown; otherwise just the label.

    Fix vs. original: the explanatory comment lines had lost their '#' markers
    (a formatting/extraction artifact), making the function a syntax error.
    They are restored as comments below.
    """
    import matplotlib.pyplot as plt

    # If to_show // num_col == 0 then it means num_col is greater than to_show,
    # so set num_row to 1.
    num_row = to_show // num_col if to_show // num_col != 0 else 1

    # `to_show` must be an integral multiple of `num_col`; we have num_row and
    # num_col, so simply set it equal to num_row * num_col.
    to_show = num_row * num_col

    # Plot the images.
    fig, axes = plt.subplots(num_row, num_col, figsize=(8, 2))
    for i in range(to_show):
        # If the number of rows is 1, the axes array is one-dimensional.
        if num_row == 1:
            ax = axes[i % num_col]
        else:
            ax = axes[i // num_col, i % num_col]
        ax.imshow(tf.concat([x[0][i], x[1][i]], axis=1), cmap="gray")
        ax.set_axis_off()
        if test:
            ax.set_title("True: {} | Pred: {:.5f}".format(labels[i], predictions[i][0]))
        else:
            ax.set_title("Label: {}".format(labels[i]))
    if test:
        plt.tight_layout(rect=(0, 0, 1.9, 1.9), w_pad=0.0)
    else:
        plt.tight_layout(rect=(0, 0, 1.5, 1.5))
    plt.show()
function
python
99,891
/* Read_n: rank 0 reads the vector order n from stdin and broadcasts it to all
 * ranks.  Collectively aborts (via Check_for_error) unless n > 0 and n is
 * evenly divisible by comm_sz.  On return *n_p holds the global order and
 * *local_n_p the per-process slice size. */
void Read_n(
      int*      n_p        /* out */,
      int*      local_n_p  /* out */,
      int       my_rank    /* in  */,
      int       comm_sz    /* in  */,
      MPI_Comm  comm       /* in  */) {
   int local_ok = 1;
   char *fname = "Read_n";

   if (my_rank == 0) {
      printf("What's the order of the vectors?\n");
      /* NOTE(review): scanf's return value is unchecked; bad input leaves *n_p
       * unset -- confirm whether callers rely on interactive input only. */
      scanf("%d", n_p);
   }
   /* Every rank must learn n before validating it. */
   MPI_Bcast(n_p, 1, MPI_INT, 0, comm);
   if (*n_p <= 0 || *n_p % comm_sz != 0) local_ok = 0;
   /* Collective error check: all ranks abort together on invalid n. */
   Check_for_error(local_ok, fname,
         "n should be > 0 and evenly divisible by comm_sz", comm);
   *local_n_p = *n_p/comm_sz;
}
function
c++
99,892
// startFrameWrite starts writing the frame in wr.  The write happens inline
// when it fits within the buffered writer's available space, otherwise on its
// own goroutine.  Must run on the serve goroutine; only one frame may be in
// flight at a time.
func (sc *serverConn) startFrameWrite(wr FrameWriteRequest) {
	sc.serveG.check()
	if sc.writingFrame {
		panic("internal error: can only be writing one frame at a time")
	}

	st := wr.stream
	if st != nil {
		switch st.state {
		case stateHalfClosedLocal:
			panic("internal error: attempt to send frame on half-closed-local stream")
		case stateClosed:
			if st.sentReset || st.gotReset {
				// A RST was already exchanged for this stream; silently drop
				// the frame and let the scheduler pick the next one.
				sc.scheduleFrameWrite()
				return
			}
			panic(fmt.Sprintf("internal error: attempt to send a write %v on a closed stream", wr))
		}
	}
	if wpp, ok := wr.write.(*writePushPromise); ok {
		// A PUSH_PROMISE needs its promised stream ID allocated before writing.
		var err error
		wpp.promisedID, err = wpp.allocatePromisedID()
		if err != nil {
			sc.writingFrameAsync = false
			if wr.done != nil {
				// Report the allocation failure to the waiter, if any.
				wr.done <- err
			}
			return
		}
	}

	sc.writingFrame = true
	sc.needsFrameFlush = true
	if wr.write.staysWithinBuffer(sc.bw.Available()) {
		// Fast path: the write cannot block, do it inline.
		sc.writingFrameAsync = false
		err := wr.write.writeFrame(sc)
		sc.wroteFrame(frameWriteResult{wr, err})
	} else {
		// Slow path: the write could block on the network; do it asynchronously.
		sc.writingFrameAsync = true
		go sc.writeFrameAsync(wr)
	}
}
function
go
99,893
// NewTicker returns a new Ticker containing a channel that will deliver the
// time after each tick of duration d.
// NOTE(review): the guard rejects any d below one microsecond, yet the panic
// message says "non-positive interval" -- confirm which contract is intended.
func NewTicker(d time.Duration) *Ticker {
	if d < time.Microsecond {
		panic(errors.New("non-positive interval for NewTicker"))
	}
	// Buffer of 1 lets the ticker drop ticks for slow receivers rather than block.
	c := make(chan time.Time, 1)
	t := &Ticker{
		C: c,
		r: timer{
			when:   when(d),
			period: int64(d),
			f:      f, // send function defined elsewhere in this package
			arg:    c,
		},
	}
	startTimer(&t.r)
	return t
}
function
go
99,894
def _infer(self, data: Dict) -> Tuple[List, bool, str]:
    """Run inference for one opunit using a trained model map.

    ``data`` must carry "features", "opunit" (an OpUnit name string) and
    "model_path".  Returns ``(predictions, success, error_code)``;
    predictions is a plain list so it can be serialized.
    """
    features = data["features"]
    opunit = data["opunit"]
    model_path = data["model_path"]

    # The opunit arrives as a string name and must resolve to an OpUnit member.
    if not isinstance(opunit, str):
        return [], False, "INVALID_OPUNIT"
    try:
        opunit = OpUnit[opunit]
    except KeyError as e:
        logging.error(f"{opunit} is not a valid Opunit name")
        return [], False, "INVALID_OPUNIT"

    features = np.array(features)
    logging.debug(f"Using model on {opunit}")

    model_map = self._load_model_map(model_path)
    if model_map is None:
        logging.error(
            f"Model map at {str(model_path)} has not been trained")
        return [], False, "MODEL_MAP_NOT_TRAINED"

    # NOTE(review): if model_map is a plain dict and opunit is absent, this
    # raises KeyError before the None check below -- confirm the contract of
    # _load_model_map (missing entries as None vs. missing keys).
    model = model_map[opunit]
    if model is None:
        logging.error(f"Model for {opunit} doesn't exist")
        return [], False, "MODEL_NOT_FOUND"

    y_pred = model.predict(features)
    return y_pred.tolist(), True, ""
function
python
99,895
// Build a PartitionSchema for 'schema' from the given hash and range
// partitioning specs.  'hash_partitions' pairs a list of column names with a
// bucket count for each hash dimension; 'range_partition_columns' lists the
// range partition columns in order.  Aborts the process (CHECK_OK) if the
// resulting spec is invalid for the schema.
void GeneratePartitionSchema(const Schema& schema,
                             const vector<pair<vector<string>, int>>& hash_partitions,
                             const vector<string>& range_partition_columns,
                             PartitionSchema* partition_schema) {
  PartitionSchemaPB partition_schema_pb;
  for (const auto& col_names_and_num_buckets : hash_partitions) {
    auto* hash_dimension_pb = partition_schema_pb.add_hash_schema();
    hash_dimension_pb->set_num_buckets(col_names_and_num_buckets.second);
    hash_dimension_pb->set_seed(0);  // fixed seed -> deterministic bucketing
    for (const auto& col_name : col_names_and_num_buckets.first) {
      auto* column_pb = hash_dimension_pb->add_columns();
      // NOTE(review): find_column's result is stored directly as the column id;
      // a missing column would yield an invalid id -- confirm callers always
      // pass valid column names.
      int col_idx = schema.find_column(col_name);
      column_pb->set_id(col_idx);
      column_pb->set_name(col_name);
    }
  }
  if (!range_partition_columns.empty()) {
    auto* range_schema = partition_schema_pb.mutable_range_schema();
    for (const auto& range_column : range_partition_columns) {
      range_schema->add_columns()->set_name(range_column);
    }
  }
  CHECK_OK(PartitionSchema::FromPB(partition_schema_pb, schema, partition_schema));
}
function
c++
99,896
func NewReplicaStatusDataAllOf(clusterId string, topicName string, brokerId int32, partitionId int32, isLeader bool, isObserver bool, isIsrEligible bool, isInIsr bool, isCaughtUp bool, logStartOffset int64, logEndOffset int64, lastCaughtUpTimeMs int64, lastFetchTimeMs int64) *ReplicaStatusDataAllOf { this := ReplicaStatusDataAllOf{} this.ClusterId = clusterId this.TopicName = topicName this.BrokerId = brokerId this.PartitionId = partitionId this.IsLeader = isLeader this.IsObserver = isObserver this.IsIsrEligible = isIsrEligible this.IsInIsr = isInIsr this.IsCaughtUp = isCaughtUp this.LogStartOffset = logStartOffset this.LogEndOffset = logEndOffset this.LastCaughtUpTimeMs = lastCaughtUpTimeMs this.LastFetchTimeMs = lastFetchTimeMs return &this }
function
go
99,897
/**
 * Look up icons whose display title starts with {@code iconTitle} in the
 * current language.
 *
 * @param iconTitle title prefix to match (a SQL LIKE wildcard is appended)
 * @return matching icons; empty when nothing matches
 */
@Nullable
public ArrayList<JellowIcon> query(String iconTitle) {
    ArrayList<JellowIcon> list = new ArrayList<>();
    // Prefix match pattern for the DAO's LIKE query.
    String selectQuery = iconTitle + "%";
    Gson gson = new Gson();
    Icon icon;
    ArrayList<VerbiageModel> vList = new ArrayList<>(
            database.verbiageDao().getVerbiageListByTitle(selectQuery, languageCode));
    for (VerbiageModel model : vList) {
        // Each row stores its icon as serialized JSON.
        icon = gson.fromJson(model.getIcon(), Icon.class);
        // NOTE(review): the length>=12 filter on the icon id presumably excludes a
        // class of built-in ids -- confirm the id format this distinguishes.
        if (model.getIconId().length() >= 12)
            list.add(new JellowIcon(model.getIconId(),
                    icon.getDisplay_Label().replace("…", ""),
                    icon.getSpeech_Label(), model.getEventTag()));
    }
    return list;
}
function
java
99,898
def from_string(self, string, doc_format=None):
    """Parse an odML document from ``string`` using the configured parser.

    Supported parsers: 'XML', 'YAML', 'JSON' and 'RDF' (which requires
    ``doc_format`` and may yield multiple documents).  Returns the parsed
    document(s), or None when YAML/JSON parsing fails.
    """
    if self.parser == 'XML':
        self.doc = xmlparser.XMLReader().from_string(string)
        if self.show_warnings:
            self._validation_warning()
        return self.doc

    if self.parser == 'YAML':
        try:
            self.parsed_doc = yaml.safe_load(string)
        except yaml.parser.ParserError as err:
            # Parse failure: report and return None.
            print(err)
            return

        self.doc = DictReader().to_odml(self.parsed_doc)
        if self.show_warnings:
            self._validation_warning()
        return self.doc

    if self.parser == 'JSON':
        try:
            self.parsed_doc = json.loads(string)
        except ValueError as err:
            # Parse failure: report and return None.
            print("JSON Decoder Error: %s" % err)
            return

        self.doc = DictReader().to_odml(self.parsed_doc)
        if self.show_warnings:
            self._validation_warning()
        return self.doc

    if self.parser == 'RDF':
        if not doc_format:
            raise ValueError("Format of the rdf file was not specified")

        self.doc = RDFReader().from_string(string, doc_format)
        # RDF parsing yields multiple documents, so validation warnings are
        # emitted per document here instead of via _validation_warning.
        for doc in self.doc:
            report = Validation(doc).report()
            if report:
                msg = "The loaded Document contains unresolved issues."
                msg += " Run the Documents 'validate' method to access them.\n%s" % report
                warnings.warn(msg)
        return self.doc
function
python
99,899