Column            Type           Lengths / values
_id               stringlengths  2–6
partition         stringclasses  3 values
text              stringlengths  87–6.4k
title             stringclasses  1 value
language          stringclasses  1 value
meta_information  dict
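Each record below pairs an identifier and a partition label with a single Python function stored in the text field. As a rough sketch of how such records might be consumed (the field names _id, partition, text, and language come from the schema above, while the JSON Lines file name snippets.jsonl and the one-object-per-line layout are assumptions made only for illustration, not something this dump specifies):

import json

def load_by_partition(path="snippets.jsonl"):
    """Group records by their 'partition' field.

    Assumes one JSON object per line with the fields shown in the schema
    above; the file name and storage format are illustrative guesses.
    """
    by_partition = {}
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            record = json.loads(line)
            by_partition.setdefault(record["partition"], []).append(record)
    return by_partition

if __name__ == "__main__":
    for partition, rows in load_by_partition().items():
        print(partition, len(rows), "records, e.g.", rows[0]["_id"])

Grouping on partition mirrors the split labels (e.g. valid, test) visible in the rows that follow.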
d20099
valid
def encode_list(key, list_):
    # type: (str, Iterable) -> Dict[str, str]
    """
    Converts a list into a space-separated string and puts it in a dictionary

    :param key: Dictionary key to store the list
    :param list_: A list of objects
    :return: A dictionary key->string or an empty dictionary
    """
    if not list_:
        return {}
    return {key: " ".join(str(i) for i in list_)}
PYTHON
{ "dummy_field": "" }
d20100
valid
def resize_by_area(img, size):
    """image resize function used by quite a few image problems."""
    return tf.to_int64(
        tf.image.resize_images(img, [size, size], tf.image.ResizeMethod.AREA))
PYTHON
{ "dummy_field": "" }
d20101
valid
def fillna(series_or_arr, missing_value=0.0):
    """Fill missing values in pandas objects and numpy arrays.

    Arguments
    ---------
    series_or_arr : pandas.Series, numpy.ndarray
        The numpy array or pandas series for which the missing values need
        to be replaced.
    missing_value : float, int, str
        The value to replace the missing value with. Default 0.0.

    Returns
    -------
    pandas.Series, numpy.ndarray
        The numpy array or pandas series with the missing values filled.
    """
    if pandas.notnull(missing_value):
        if isinstance(series_or_arr, (numpy.ndarray)):
            series_or_arr[numpy.isnan(series_or_arr)] = missing_value
        else:
            series_or_arr.fillna(missing_value, inplace=True)

    return series_or_arr
PYTHON
{ "dummy_field": "" }
d20105
test
def _process_and_sort(s, force_ascii, full_process=True):
    """Return a cleaned string with tokens sorted."""
    # pull tokens
    ts = utils.full_process(s, force_ascii=force_ascii) if full_process else s
    tokens = ts.split()

    # sort tokens and join
    sorted_string = u" ".join(sorted(tokens))
    return sorted_string.strip()
PYTHON
{ "dummy_field": "" }
d20107
test
def empty(self, name, **kwargs):
    """Create an array. Keyword arguments as per
    :func:`zarr.creation.empty`."""
    return self._write_op(self._empty_nosync, name, **kwargs)
PYTHON
{ "dummy_field": "" }
d20108
test
def is_iterable_but_not_string(obj):
    """
    Determine whether or not obj is iterable but not a string (eg, a list,
    set, tuple etc).
    """
    return hasattr(obj, '__iter__') and not isinstance(obj, str) and not isinstance(obj, bytes)
PYTHON
{ "dummy_field": "" }
d20111
test
def page_align_content_length(length):
    # type: (int) -> int
    """Compute page boundary alignment
    :param int length: content length
    :rtype: int
    :return: aligned byte boundary
    """
    mod = length % _PAGEBLOB_BOUNDARY
    if mod != 0:
        return length + (_PAGEBLOB_BOUNDARY - mod)
    return length
PYTHON
{ "dummy_field": "" }
d20113
test
def __replace_all(repls: dict, input: str) -> str:
    """
    Replaces from a string **input** all the occurrences of some
    symbols according to mapping **repls**.

    :param dict repls: where #key is the old character and
        #value is the one to substitute with;
    :param str input: original string where to apply the replacements;
    :return: *(str)* the string with the desired characters replaced
    """
    return re.sub('|'.join(re.escape(key) for key in repls.keys()),
                  lambda k: repls[k.group(0)], input)
PYTHON
{ "dummy_field": "" }
d20114
test
def is_iterable_of_int(l):
    r""" Checks if l is iterable and contains only integral types """
    if not is_iterable(l):
        return False

    return all(is_int(value) for value in l)
PYTHON
{ "dummy_field": "" }
d20119
test
def _return_comma_list(self, l):
    """ get a list and return a string with comma separated list values
    Examples ['to', 'ta'] will return 'to,ta'.
    """
    if isinstance(l, (text_type, int)):
        return l

    if not isinstance(l, list):
        raise TypeError(l, ' should be a list of integers, '
                           'not {0}'.format(type(l)))

    str_ids = ','.join(str(i) for i in l)
    return str_ids
PYTHON
{ "dummy_field": "" }
d20120
test
def asynchronous(function, event):
    """
    Runs the function asynchronously taking care of exceptions.
    """
    thread = Thread(target=synchronous, args=(function, event))
    thread.daemon = True
    thread.start()
PYTHON
{ "dummy_field": "" }
d20126
test
def url_host(url: str) -> str:
    """
    Parses hostname from URL.
    :param url: URL
    :return: hostname
    """
    from urllib.parse import urlparse
    res = urlparse(url)
    return res.netloc.split(':')[0] if res.netloc else ''
PYTHON
{ "dummy_field": "" }
d20127
test
def flatten_multidict(multidict):
    """Return flattened dictionary from ``MultiDict``."""
    return dict([(key, value if len(value) > 1 else value[0])
                 for (key, value) in multidict.iterlists()])
PYTHON
{ "dummy_field": "" }
d20129
test
def to_dotfile(G: nx.DiGraph, filename: str):
    """ Output a networkx graph to a DOT file. """
    A = to_agraph(G)
    A.write(filename)
PYTHON
{ "dummy_field": "" }
d20135
test
def compute_centroid(points):
    """ Computes the centroid of set of points

    Args:
        points (:obj:`list` of :obj:`Point`)
    Returns:
        :obj:`Point`
    """
    lats = [p[1] for p in points]
    lons = [p[0] for p in points]
    return Point(np.mean(lats), np.mean(lons), None)
PYTHON
{ "dummy_field": "" }
d20141
test
def _validate(data, schema, ac_schema_safe=True, **options):
    """
    See the description of :func:`validate` for more details of parameters
    and return value.

    Validate target object 'data' with given schema object.
    """
    try:
        jsonschema.validate(data, schema, **options)
    except (jsonschema.ValidationError, jsonschema.SchemaError, Exception) as exc:
        if ac_schema_safe:
            return (False, str(exc))  # Validation was failed.
        raise

    return (True, '')
PYTHON
{ "dummy_field": "" }
d20142
test
def access_token(self):
    """ WeChat access token """
    access_token = self.session.get(self.access_token_key)
    if access_token:
        if not self.expires_at:
            # user provided access_token, just return it
            return access_token

        timestamp = time.time()
        if self.expires_at - timestamp > 60:
            return access_token

    self.fetch_access_token()
    return self.session.get(self.access_token_key)
PYTHON
{ "dummy_field": "" }
d20143
test
def str2int(num, radix=10, alphabet=BASE85):
    """helper function for quick base conversions from strings to integers"""
    return NumConv(radix, alphabet).str2int(num)
PYTHON
{ "dummy_field": "" }
d20144
test
def drag_and_drop(self, droppable):
    """
    Performs drag-and-drop of this element onto another element.

    Currently works only on Chrome driver.
    """
    self.scroll_to()
    ActionChains(self.parent.driver).drag_and_drop(self._element, droppable._element).perform()
PYTHON
{ "dummy_field": "" }
d20146
test
def check_git():
    """Check if git command is available."""
    try:
        with open(os.devnull, "wb") as devnull:
            subprocess.check_call(["git", "--version"], stdout=devnull, stderr=devnull)
    except:
        raise RuntimeError("Please make sure git is installed and on your path.")
PYTHON
{ "dummy_field": "" }
d20147
test
def prt_nts(data_nts, prtfmt=None, prt=sys.stdout, nt_fields=None, **kws):
    """Print list of namedtuples into a table using prtfmt."""
    prt_txt(prt, data_nts, prtfmt, nt_fields, **kws)
PYTHON
{ "dummy_field": "" }
d20149
test
def count_list(the_list):
    """
    Generates a count of the number of times each unique item appears in a list
    """
    count = the_list.count
    result = [(item, count(item)) for item in set(the_list)]
    result.sort()
    return result
PYTHON
{ "dummy_field": "" }
d20151
test
def _kbhit_unix() -> bool:
    """
    Under UNIX: is a keystroke available?
    """
    dr, dw, de = select.select([sys.stdin], [], [], 0)
    return dr != []
PYTHON
{ "dummy_field": "" }
d20155
test
def url_host(url: str) -> str:
    """
    Parses hostname from URL.
    :param url: URL
    :return: hostname
    """
    from urllib.parse import urlparse
    res = urlparse(url)
    return res.netloc.split(':')[0] if res.netloc else ''
PYTHON
{ "dummy_field": "" }
d20156
test
def asMaskedArray(self):
    """ Converts to a masked array """
    return ma.masked_array(data=self.data, mask=self.mask, fill_value=self.fill_value)
PYTHON
{ "dummy_field": "" }
d20158
test
def csv_to_numpy(string_like, dtype=None):
    # type: (str) -> np.array
    """Convert a CSV object to a numpy array.

    Args:
        string_like (str): CSV string.
        dtype (dtype, optional): Data type of the resulting array. If None,
            the dtypes will be determined by the contents of each column,
            individually. This argument can only be used to 'upcast' the array.
            For downcasting, use the .astype(t) method.

    Returns:
        (np.array): numpy array
    """
    stream = StringIO(string_like)
    return np.genfromtxt(stream, dtype=dtype, delimiter=',')
PYTHON
{ "dummy_field": "" }
d20160
test
def loads(s, model=None, parser=None):
    """Deserialize s (a str) to a Python object."""
    with StringIO(s) as f:
        return load(f, model=model, parser=parser)
PYTHON
{ "dummy_field": "" }
d20166
test
def EvalGaussianPdf(x, mu, sigma):
    """Computes the unnormalized PDF of the normal distribution.

    x: value
    mu: mean
    sigma: standard deviation

    returns: float probability density
    """
    return scipy.stats.norm.pdf(x, mu, sigma)
PYTHON
{ "dummy_field": "" }
d20168
test
def _rescale_array(self, array, scale, zero):
    """
    Scale the input array
    """
    if scale != 1.0:
        sval = numpy.array(scale, dtype=array.dtype)
        array *= sval
    if zero != 0.0:
        zval = numpy.array(zero, dtype=array.dtype)
        array += zval
PYTHON
{ "dummy_field": "" }
d20169
test
def upcaseTokens(s, l, t):
    """Helper parse action to convert tokens to upper case."""
    return [tt.upper() for tt in map(_ustr, t)]
PYTHON
{ "dummy_field": "" }
d20170
test
def build(self, **kwargs):
    """Build the lexer."""
    self.lexer = ply.lex.lex(object=self, **kwargs)
PYTHON
{ "dummy_field": "" }
d20172
test
def set_trace():
    """Start a Pdb instance at the calling frame, with stdout routed to
    sys.__stdout__."""
    # https://github.com/nose-devs/nose/blob/master/nose/tools/nontrivial.py
    pdb.Pdb(stdout=sys.__stdout__).set_trace(sys._getframe().f_back)
PYTHON
{ "dummy_field": "" }
d20176
test
def machine_info():
    """Retrieve core and memory information for the current machine.
    """
    import psutil
    BYTES_IN_GIG = 1073741824.0
    free_bytes = psutil.virtual_memory().total
    return [{"memory": float("%.1f" % (free_bytes / BYTES_IN_GIG)),
             "cores": multiprocessing.cpu_count(),
             "name": socket.gethostname()}]
PYTHON
{ "dummy_field": "" }
d20177
test
def indexes_equal(a: Index, b: Index) -> bool:
    """
    Are two indexes equal? Checks by comparing ``str()`` versions of them.
    (AM UNSURE IF THIS IS ENOUGH.)
    """
    return str(a) == str(b)
PYTHON
{ "dummy_field": "" }
d20179
test
def cleanup():
    """Cleanup the output directory"""
    if _output_dir and os.path.exists(_output_dir):
        log.msg_warn("Cleaning up output directory at '{output_dir}' ..."
                     .format(output_dir=_output_dir))
        if not _dry_run:
            shutil.rmtree(_output_dir)
PYTHON
{ "dummy_field": "" }
d20181
test
def urlencoded(body, charset='ascii', **kwargs):
    """Converts query strings into native Python objects"""
    return parse_query_string(text(body, charset=charset), False)
PYTHON
{ "dummy_field": "" }
d20182
test
def each_img(dir_path):
    """
    Iterates through each image in the given directory. (not recursive)
    :param dir_path: Directory path where image files are present
    :return: Iterator to iterate through image files
    """
    for fname in os.listdir(dir_path):
        if fname.endswith('.jpg') or fname.endswith('.png') or fname.endswith('.bmp'):
            yield fname
PYTHON
{ "dummy_field": "" }
d20187
test
def sanitize_word(s):
    """Remove non-alphanumerical characters from metric word.
    And trim excessive underscores.
    """
    s = re.sub('[^\w-]+', '_', s)
    s = re.sub('__+', '_', s)
    return s.strip('_')
PYTHON
{ "dummy_field": "" }
d20188
test
def A(*a):
    """convert iterable object into numpy array"""
    return np.array(a[0]) if len(a) == 1 else [np.array(o) for o in a]
PYTHON
{ "dummy_field": "" }
d20189
test
def __init__(self):
    """Initialize the state of the object"""
    self.state = self.STATE_INITIALIZING
    self.state_start = time.time()
PYTHON
{ "dummy_field": "" }
d20190
test
def scan(client, query=None, scroll='5m', raise_on_error=True,
         preserve_order=False, size=1000, **kwargs):
    """
    Simple abstraction on top of the
    :meth:`~elasticsearch.Elasticsearch.scroll` api - a simple iterator that
    yields all hits as returned by underlying scroll requests.

    By default scan does not return results in any pre-determined order. To
    have a standard order in the returned documents (either by score or
    explicit sort definition) when scrolling, use ``preserve_order=True``. This
    may be an expensive operation and will negate the performance benefits of
    using ``scan``.

    :arg client: instance of :class:`~elasticsearch.Elasticsearch` to use
    :arg query: body for the :meth:`~elasticsearch.Elasticsearch.search` api
    :arg scroll: Specify how long a consistent view of the index should be
        maintained for scrolled search
    :arg raise_on_error: raises an exception (``ScanError``) if an error is
        encountered (some shards fail to execute). By default we raise.
    :arg preserve_order: don't set the ``search_type`` to ``scan`` - this will
        cause the scroll to paginate with preserving the order. Note that this
        can be an extremely expensive operation and can easily lead to
        unpredictable results, use with caution.
    :arg size: size (per shard) of the batch sent at each iteration.

    Any additional keyword arguments will be passed to the initial
    :meth:`~elasticsearch.Elasticsearch.search` call::

        scan(es,
            query={"query": {"match": {"title": "python"}}},
            index="orders-*",
            doc_type="books"
        )

    """
    if not preserve_order:
        kwargs['search_type'] = 'scan'
    # initial search
    resp = client.search(body=query, scroll=scroll, size=size, **kwargs)

    scroll_id = resp.get('_scroll_id')
    if scroll_id is None:
        return

    first_run = True
    while True:
        # if we didn't set search_type to scan initial search contains data
        if preserve_order and first_run:
            first_run = False
        else:
            resp = client.scroll(scroll_id, scroll=scroll)

        for hit in resp['hits']['hits']:
            yield hit

        # check if we have any errors
        if resp["_shards"]["failed"]:
            logger.warning(
                'Scroll request has failed on %d shards out of %d.',
                resp['_shards']['failed'], resp['_shards']['total']
            )
            if raise_on_error:
                raise ScanError(
                    'Scroll request has failed on %d shards out of %d.' %
                    (resp['_shards']['failed'], resp['_shards']['total'])
                )

        scroll_id = resp.get('_scroll_id')
        # end of scroll
        if scroll_id is None or not resp['hits']['hits']:
            break
PYTHON
{ "dummy_field": "" }
d20192
test
def attr_cache_clear(self):
    node = extract_node("""def cache_clear(self): pass""")
    return BoundMethod(proxy=node, bound=self._instance.parent.scope())
PYTHON
{ "dummy_field": "" }
d20193
test
def stop_at(iterable, idx):
    """Stops iterating before yielding the specified idx."""
    for i, item in enumerate(iterable):
        if i == idx:
            return
        yield item
PYTHON
{ "dummy_field": "" }
d20194
test
def date_to_datetime(x):
    """Convert a date into a datetime"""
    if not isinstance(x, datetime) and isinstance(x, date):
        return datetime.combine(x, time())
    return x
PYTHON
{ "dummy_field": "" }
d20195
test
def accel_next(self, *args):
    """Callback to go to the next tab. Called by the accel key.
    """
    if self.get_notebook().get_current_page() + 1 == self.get_notebook().get_n_pages():
        self.get_notebook().set_current_page(0)
    else:
        self.get_notebook().next_page()
    return True
PYTHON
{ "dummy_field": "" }
d20196
test
def clean_dataframe(df):
    """Fill NaNs with the previous value, the next value or if all are NaN then 1.0"""
    df = df.fillna(method='ffill')
    df = df.fillna(0.0)
    return df
PYTHON
{ "dummy_field": "" }
d20201
test
def advance_one_line(self):
    """Advances to next line."""
    current_line = self._current_token.line_number
    while current_line == self._current_token.line_number:
        self._current_token = ConfigParser.Token(*next(self._token_generator))
PYTHON
{ "dummy_field": "" }
d20202
test
def round_to_int(number, precision):
    """Round a number to a precision"""
    precision = int(precision)
    rounded = (int(number) + precision / 2) // precision * precision
    return rounded
PYTHON
{ "dummy_field": "" }
d20204
test
def __add_namespaceinfo(self, ni):
    """Internal method to directly add a _NamespaceInfo object to this set.
    No sanity checks are done (e.g. checking for prefix conflicts), so be
    sure to do it yourself before calling this."""
    self.__ns_uri_map[ni.uri] = ni
    for prefix in ni.prefixes:
        self.__prefix_map[prefix] = ni
PYTHON
{ "dummy_field": "" }
d20207
test
def to_int64(a):
    """Return view of the recarray with all int32 cast to int64."""
    # build new dtype and replace i4 --> i8
    def promote_i4(typestr):
        if typestr[1:] == 'i4':
            typestr = typestr[0] + 'i8'
        return typestr

    dtype = [(name, promote_i4(typestr)) for name, typestr in a.dtype.descr]
    return a.astype(dtype)
PYTHON
{ "dummy_field": "" }
d20208
test
def close(self):
    """Send a close message to the external process and join it."""
    try:
        self._conn.send((self._CLOSE, None))
        self._conn.close()
    except IOError:
        # The connection was already closed.
        pass
    self._process.join()
PYTHON
{ "dummy_field": "" }
d20209
test
def reduce_fn(x):
    """
    Aggregation function to get the first non-zero value.
    """
    values = x.values if pd and isinstance(x, pd.Series) else x
    for v in values:
        if not is_nan(v):
            return v
    return np.NaN
PYTHON
{ "dummy_field": "" }
d20211
test
def coverage(ctx, opts=""):
    """
    Execute all tests (normal and slow) with coverage enabled.
    """
    return test(ctx, coverage=True, include_slow=True, opts=opts)
PYTHON
{ "dummy_field": "" }
d20212
test
def glob_by_extensions(directory, extensions):
    """ Returns files matched by all extensions in the extensions list """
    directorycheck(directory)
    files = []
    xt = files.extend
    for ex in extensions:
        xt(glob.glob('{0}/*.{1}'.format(directory, ex)))
    return files
PYTHON
{ "dummy_field": "" }
d20214
test
def calc_list_average(l):
    """
    Calculates the average value of a list of numbers
    Returns a float
    """
    total = 0.0
    for value in l:
        total += value
    return total / len(l)
PYTHON
{ "dummy_field": "" }
d20219
test
def lock(self, block=True):
    """
    Lock connection from being used elsewhere
    """
    self._locked = True
    return self._lock.acquire(block)
PYTHON
{ "dummy_field": "" }
d20232
test
def to_array(self):
    """Convert the table to a structured NumPy array."""
    dt = np.dtype(list(zip(self.labels, (c.dtype for c in self.columns))))
    arr = np.empty_like(self.columns[0], dt)
    for label in self.labels:
        arr[label] = self[label]
    return arr
PYTHON
{ "dummy_field": "" }
d20233
test
def join_cols(cols):
    """Join list of columns into a string for a SQL query"""
    return ", ".join([i for i in cols]) if isinstance(cols, (list, tuple, set)) else cols
PYTHON
{ "dummy_field": "" }
d20234
test
def lines(input):
    """Remove comments and empty lines"""
    for raw_line in input:
        line = raw_line.strip()
        if line and not line.startswith('#'):
            yield strip_comments(line)
PYTHON
{ "dummy_field": "" }
d20238
test
def toJson(protoObject, indent=None):
    """
    Serialises a protobuf object as json
    """
    # Using the internal method because this way we can reformat the JSON
    js = json_format.MessageToDict(protoObject, False)
    return json.dumps(js, indent=indent)
PYTHON
{ "dummy_field": "" }
d20239
test
def cross_join(df1, df2):
    """
    Return a dataframe that is a cross between dataframes
    df1 and df2

    ref: https://github.com/pydata/pandas/issues/5401
    """
    if len(df1) == 0:
        return df2

    if len(df2) == 0:
        return df1

    # Add as lists so that the new index keeps the items in
    # the order that they are added together
    all_columns = pd.Index(list(df1.columns) + list(df2.columns))
    df1['key'] = 1
    df2['key'] = 1
    return pd.merge(df1, df2, on='key').loc[:, all_columns]
PYTHON
{ "dummy_field": "" }
d20240
test
def dotproduct(X, Y):
    """Return the sum of the element-wise product of vectors x and y.
    >>> dotproduct([1, 2, 3], [1000, 100, 10])
    1230
    """
    return sum([x * y for x, y in zip(X, Y)])
PYTHON
{ "dummy_field": "" }
d20241
test
def dates_in_range(start_date, end_date):
    """Returns all dates between two dates.

    Inclusive of the start date but not the end date.

    Args:
        start_date (datetime.date)
        end_date (datetime.date)

    Returns:
        (list) of datetime.date objects
    """
    return [
        start_date + timedelta(n)
        for n in range(int((end_date - start_date).days))
    ]
PYTHON
{ "dummy_field": "" }
d20242
test
def get_remote_content(filepath):
    """ A handy wrapper to get a remote file content """
    with hide('running'):
        temp = BytesIO()
        get(filepath, temp)
        content = temp.getvalue().decode('utf-8')
    return content.strip()
PYTHON
{ "dummy_field": "" }
d20244
test
def _removeStopwords(text_list):
    """
    Removes stopwords contained in a list of words.

    :param text_list: A list of strings.
    :type text_list: list.

    :returns: The input ``text_list`` with stopwords removed.
    :rtype: list
    """
    output_list = []

    for word in text_list:
        if word.lower() not in _stopwords:
            output_list.append(word)

    return output_list
PYTHON
{ "dummy_field": "" }
d20245
test
def _increase_file_handle_limit():
    """Raise the open file handles permitted by the Dusty daemon process
    and its child processes. The number we choose here needs to be within
    the OS X default kernel hard limit, which is 10240."""
    logging.info('Increasing file handle limit to {}'.format(constants.FILE_HANDLE_LIMIT))
    resource.setrlimit(resource.RLIMIT_NOFILE,
                       (constants.FILE_HANDLE_LIMIT, resource.RLIM_INFINITY))
PYTHON
{ "dummy_field": "" }
d20246
test
def get_longest_orf(orfs):
    """Find longest ORF from the given list of ORFs."""
    sorted_orf = sorted(orfs, key=lambda x: len(x['sequence']), reverse=True)[0]
    return sorted_orf
PYTHON
{ "dummy_field": "" }
d20247
test
def pprint_for_ordereddict():
    """
    Context manager that causes pprint() to print OrderedDict objects as
    nicely as standard Python dictionary objects.
    """
    od_saved = OrderedDict.__repr__
    try:
        OrderedDict.__repr__ = dict.__repr__
        yield
    finally:
        OrderedDict.__repr__ = od_saved
PYTHON
{ "dummy_field": "" }
d20250
test
async def async_input(prompt):
    """
    Python's ``input()`` is blocking, which means the event loop we set above
    can't be running while we're blocking there. This method will let the loop
    run while we wait for input.
    """
    print(prompt, end='', flush=True)
    return (await loop.run_in_executor(None, sys.stdin.readline)).rstrip()
PYTHON
{ "dummy_field": "" }
d20253
test
def newest_file(file_iterable):
    """
    Returns the name of the newest file given an iterable of file names.
    """
    return max(file_iterable, key=lambda fname: os.path.getmtime(fname))
PYTHON
{ "dummy_field": "" }
d20254
test
def truncate_table(self, tablename):
    """
    SQLite3 doesn't support direct truncate, so we just use delete here
    """
    self.get(tablename).remove()
    self.db.commit()
PYTHON
{ "dummy_field": "" }
d20255
test
def find_all(self, string, callback):
    """
    Wrapper on iter method, callback gets an iterator result
    """
    for index, output in self.iter(string):
        callback(index, output)
PYTHON
{ "dummy_field": "" }
d20256
test
def __next__(self, reward, ask_id, lbl):
    """For Python3 compatibility of generator."""
    return self.next(reward, ask_id, lbl)
PYTHON
{ "dummy_field": "" }
d20260
test
def _assert_is_type(name, value, value_type):
    """Assert that a value must be a given type."""
    if not isinstance(value, value_type):
        if type(value_type) is tuple:
            types = ', '.join(t.__name__ for t in value_type)
            raise ValueError('{0} must be one of ({1})'.format(name, types))
        else:
            raise ValueError('{0} must be {1}'
                             .format(name, value_type.__name__))
PYTHON
{ "dummy_field": "" }
d20261
test
def imapchain(*a, **kwa):
    """ Like map but also chains the results. """
    imap_results = map(*a, **kwa)
    return itertools.chain(*imap_results)
PYTHON
{ "dummy_field": "" }
d20263
test
def paginate(self, request, offset=0, limit=None):
    """Paginate queryset."""
    return self.collection.offset(offset).limit(limit), self.collection.count()
PYTHON
{ "dummy_field": "" }
d20264
test
def factors(n):
    """
    Computes all the integer factors of the number `n`

    Example:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_alg import *  # NOQA
        >>> import utool as ut
        >>> result = sorted(ut.factors(10))
        >>> print(result)
        [1, 2, 5, 10]

    References:
        http://stackoverflow.com/questions/6800193/finding-all-the-factors
    """
    return set(reduce(list.__add__,
                      ([i, n // i] for i in range(1, int(n ** 0.5) + 1) if n % i == 0)))
PYTHON
{ "dummy_field": "" }
d20265
test
def xmltreefromfile(filename):
    """Internal function to read an XML file"""
    try:
        return ElementTree.parse(filename, ElementTree.XMLParser(collect_ids=False))
    except TypeError:
        return ElementTree.parse(filename, ElementTree.XMLParser())
PYTHON
{ "dummy_field": "" }
d20267
test
def _go_to_line(editor, line):
    """
    Move cursor to this line in the current buffer.
    """
    b = editor.application.current_buffer
    b.cursor_position = b.document.translate_row_col_to_index(max(0, int(line) - 1), 0)
PYTHON
{ "dummy_field": "" }
d20270
test
def get_number(s, cast=int):
    """
    Try to get a number out of a string, and cast it.
    """
    import string
    d = "".join(x for x in str(s) if x in string.digits)
    return cast(d)
PYTHON
{ "dummy_field": "" }
d20271
test
def is_sqlatype_string(coltype: Union[TypeEngine, VisitableType]) -> bool:
    """
    Is the SQLAlchemy column type a string type?
    """
    coltype = _coltype_to_typeengine(coltype)
    return isinstance(coltype, sqltypes.String)
PYTHON
{ "dummy_field": "" }
d20274
test
def returned(n):
    """Generate a random walk and return True if the walker has returned to
    the origin after taking `n` steps.
    """
    ## `takei` yield lazily so we can short-circuit and avoid computing the rest of the walk
    for pos in randwalk() >> drop(1) >> takei(xrange(n-1)):
        if pos == Origin:
            return True
    return False
PYTHON
{ "dummy_field": "" }
d20275
test
def _removeTags(tags, objects):
    """ Removes tags from objects """
    for t in tags:
        for o in objects:
            o.tags.remove(t)
    return True
PYTHON
{ "dummy_field": "" }
d20278
test
def yaml_to_param(obj, name):
    """
    Return the top-level element of a document sub-tree containing the
    YAML serialization of a Python object.
    """
    return from_pyvalue(u"yaml:%s" % name, unicode(yaml.dump(obj)))
PYTHON
{ "dummy_field": "" }
d20279
test
def __init__(self, function):
    """function: to be called with each stream element as its only argument
    """
    super(filter, self).__init__()
    self.function = function
PYTHON
{ "dummy_field": "" }
d20282
test
def to_topojson(self):
    """Adds points and converts to topojson string."""
    topojson = self.topojson

    topojson["objects"]["points"] = {
        "type": "GeometryCollection",
        "geometries": [point.to_topojson() for point in self.points.all()],
    }

    return json.dumps(topojson)
PYTHON
{ "dummy_field": "" }
d20289
test
def connect_rds(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.rds.RDSConnection`
    :return: A connection to RDS
    """
    from boto.rds import RDSConnection
    return RDSConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
PYTHON
{ "dummy_field": "" }
d20293
test
def stringify_col(df, col_name):
    """
    Take a dataframe and string-i-fy a column of values.
    Turn nan/None into "" and all other values into strings.

    Parameters
    ----------
    df : dataframe
    col_name : string
    """
    df = df.copy()
    df[col_name] = df[col_name].fillna("")
    df[col_name] = df[col_name].astype(str)
    return df
PYTHON
{ "dummy_field": "" }
d20298
test
def process_request(self, request, response):
    """Logs the basic endpoint requested"""
    self.logger.info('Requested: {0} {1} {2}'.format(request.method, request.relative_uri,
                                                     request.content_type))
PYTHON
{ "dummy_field": "" }
d20300
test
def validate(request: Union[Dict, List], schema: dict) -> Union[Dict, List]:
    """
    Wraps jsonschema.validate, returning the same object passed in.

    Args:
        request: The deserialized-from-json request.
        schema: The jsonschema schema to validate against.

    Raises:
        jsonschema.ValidationError
    """
    jsonschema_validate(request, schema)
    return request
PYTHON
{ "dummy_field": "" }
d20302
test
def _from_bytes(bytes, byteorder="big", signed=False):
    """This is the same functionality as ``int.from_bytes`` in python 3"""
    return int.from_bytes(bytes, byteorder=byteorder, signed=signed)
PYTHON
{ "dummy_field": "" }
d20303
test
def im2mat(I):
    """Converts an image to a matrix (one pixel per line)"""
    return I.reshape((I.shape[0] * I.shape[1], I.shape[2]))
PYTHON
{ "dummy_field": "" }
d20304
test
def sometimesish(fn):
    """
    Has a 50/50 chance of calling a function
    """
    def wrapped(*args, **kwargs):
        if random.randint(1, 2) == 1:
            return fn(*args, **kwargs)

    return wrapped
PYTHON
{ "dummy_field": "" }
d20310
test
def search(self, filterstr, attrlist):
    """Query the configured LDAP server."""
    return self._paged_search_ext_s(self.settings.BASE,
                                    ldap.SCOPE_SUBTREE,
                                    filterstr=filterstr,
                                    attrlist=attrlist,
                                    page_size=self.settings.PAGE_SIZE)
PYTHON
{ "dummy_field": "" }
d20316
test
def load(self):
    """Load proxy list from configured proxy source"""
    self._list = self._source.load()
    self._list_iter = itertools.cycle(self._list)
PYTHON
{ "dummy_field": "" }
d20317
test
def lognorm(x, mu, sigma=1.0):
    """
    Log-normal function from scipy
    """
    return stats.lognorm(sigma, scale=mu).pdf(x)
PYTHON
{ "dummy_field": "" }
d20318
test
def gcd_float(numbers, tol=1e-8):
    """
    Returns the greatest common divisor for a sequence of numbers.
    Uses a numerical tolerance, so can be used on floats

    Args:
        numbers: Sequence of numbers.
        tol: Numerical tolerance

    Returns:
        (int) Greatest common divisor of numbers.
    """

    def pair_gcd_tol(a, b):
        """Calculate the Greatest Common Divisor of a and b.

        Unless b==0, the result will have the same sign as b (so that when
        b is divided by it, the result comes out positive).
        """
        while b > tol:
            a, b = b, a % b
        return a

    n = numbers[0]
    for i in numbers:
        n = pair_gcd_tol(n, i)
    return n
PYTHON
{ "dummy_field": "" }
d20320
test
def vector_distance(a, b):
    """The Euclidean distance between two vectors."""
    a = np.array(a)
    b = np.array(b)
    return np.linalg.norm(a - b)
PYTHON
{ "dummy_field": "" }
d20323
test
def safe_dump(data, stream=None, **kwds):
    """implementation of safe dumper using Ordered Dict Yaml Dumper"""
    return yaml.dump(data, stream=stream, Dumper=ODYD, **kwds)
PYTHON
{ "dummy_field": "" }
d20325
test
def apply(self, func, args=(), kwds=dict()):
    """Equivalent of the apply() builtin function. It blocks till
    the result is ready."""
    return self.apply_async(func, args, kwds).get()
PYTHON
{ "dummy_field": "" }