diff --git "a/valid_code.json" "b/valid_code.json" deleted file mode 100644--- "a/valid_code.json" +++ /dev/null @@ -1,11086 +0,0 @@ -[ - { - "code": "def render_ranks (graph, ranks, dot_file=\"graph.dot\"):\n if dot_file:\n write_dot(graph, ranks, path=dot_file)", - "docstring": "render the TextRank graph for visual formats" - }, - { - "code": "def _periodicfeatures_worker(task):\n pfpickle, lcbasedir, outdir, starfeatures, kwargs = task\n try:\n return get_periodicfeatures(pfpickle,\n lcbasedir,\n outdir,\n starfeatures=starfeatures,\n **kwargs)\n except Exception as e:\n LOGEXCEPTION('failed to get periodicfeatures for %s' % pfpickle)", - "docstring": "This is a parallel worker for the drivers below." - }, - { - "code": "def textpath(self, i):\n if len(self._textpaths) == i:\n self._ctx.font(self.font, self.fontsize)\n txt = self.q[i]\n if len(self.q) > 1:\n txt += \" (\"+str(i+1)+\"/\" + str(len(self.q))+\")\"\n p = self._ctx.textpath(txt, 0, 0, width=self._w)\n h = self._ctx.textheight(txt, width=self._w)\n self._textpaths.append((p, h))\n return self._textpaths[i]", - "docstring": "Returns a cached textpath of the given text in queue." - }, - { - "code": "def delete_milestone_request(session, milestone_request_id):\n params_data = {\n 'action': 'delete',\n }\n endpoint = 'milestone_requests/{}'.format(milestone_request_id)\n response = make_put_request(session, endpoint, params_data=params_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['status']\n else:\n raise MilestoneRequestNotDeletedException(\n message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Delete a milestone request" - }, - { - "code": "def build_tmp_h5(data, samples):\n snames = [i.name for i in samples]\n snames.sort()\n uhandle = os.path.join(data.dirs.across, data.name+\".utemp.sort\")\n bseeds = os.path.join(data.dirs.across, data.name+\".tmparrs.h5\")\n get_seeds_and_hits(uhandle, bseeds, snames)", - "docstring": "build tmp h5 arrays that can return quick access for nloci" - }, - { - "code": "def _getScaledValue(self, inpt):\n if inpt == SENTINEL_VALUE_FOR_MISSING_DATA:\n return None\n else:\n val = inpt\n if val < self.minval:\n val = self.minval\n elif val > self.maxval:\n val = self.maxval\n scaledVal = math.log10(val)\n return scaledVal", - "docstring": "Convert the input, which is in normal space, into log space" - }, - { - "code": "def map(self, data):\n result = []\n for char in data:\n ret = None\n for lookup in self.mapping:\n ret = lookup(char)\n if ret is not None:\n break\n if ret is not None:\n result.append(ret)\n else:\n result.append(char)\n return result", - "docstring": "Mapping part of string preparation." 
- }, - { - "code": "def md_link(node):\n mimetype = node.find(\"type\")\n mdtype = node.find(\"metadataType\")\n content = node.find(\"content\")\n if None in [mimetype, mdtype, content]:\n return None\n else:\n return (mimetype.text, mdtype.text, content.text)", - "docstring": "Extract a metadata link tuple from an xml node" - }, - { - "code": "def update_environment(self, environment_name, description=None, option_settings=[], tier_type=None, tier_name=None,\n tier_version='1.0'):\n out(\"Updating environment: \" + str(environment_name))\n messages = self.ebs.validate_configuration_settings(self.app_name, option_settings,\n environment_name=environment_name)\n messages = messages['ValidateConfigurationSettingsResponse']['ValidateConfigurationSettingsResult']['Messages']\n ok = True\n for message in messages:\n if message['Severity'] == 'error':\n ok = False\n out(\"[\" + message['Severity'] + \"] \" + str(environment_name) + \" - '\" \\\n + message['Namespace'] + \":\" + message['OptionName'] + \"': \" + message['Message'])\n self.ebs.update_environment(\n environment_name=environment_name,\n description=description,\n option_settings=option_settings,\n tier_type=tier_type,\n tier_name=tier_name,\n tier_version=tier_version)", - "docstring": "Updates an application version" - }, - { - "code": "def _init_docker_vm():\n if not _dusty_vm_exists():\n log_to_client('Initializing new Dusty VM with Docker Machine')\n machine_options = ['--driver', 'virtualbox',\n '--virtualbox-cpu-count', '-1',\n '--virtualbox-boot2docker-url', constants.CONFIG_BOOT2DOCKER_URL,\n '--virtualbox-memory', str(get_config_value(constants.CONFIG_VM_MEM_SIZE)),\n '--virtualbox-hostonly-nictype', constants.VM_NIC_TYPE]\n check_call_demoted(['docker-machine', 'create'] + machine_options + [constants.VM_MACHINE_NAME],\n redirect_stderr=True)", - "docstring": "Initialize the Dusty VM if it does not already exist." - }, - { - "code": "def down(self):\n self.swap(self.get_ordering_queryset().filter(order__gt=self.order))", - "docstring": "Move this object down one position." - }, - { - "code": "def remove_falsy_values(counter: Mapping[Any, int]) -> Mapping[Any, int]:\n return {\n label: count\n for label, count in counter.items()\n if count\n }", - "docstring": "Remove all values that are zero." - }, - { - "code": "def street_number():\n length = int(random.choice(string.digits[1:6]))\n return ''.join(random.sample(string.digits, length))", - "docstring": "Return a random street number." - }, - { - "code": "def _remove_default_tz_bindings(self, context, network_id):\n default_tz = CONF.NVP.default_tz\n if not default_tz:\n LOG.warn(\"additional_default_tz_types specified, \"\n \"but no default_tz. Skipping \"\n \"_remove_default_tz_bindings().\")\n return\n if not network_id:\n LOG.warn(\"neutron network_id not specified, skipping \"\n \"_remove_default_tz_bindings()\")\n return\n for net_type in CONF.NVP.additional_default_tz_types:\n if net_type in TZ_BINDINGS:\n binding = TZ_BINDINGS[net_type]\n binding.remove(context, default_tz, network_id)\n else:\n LOG.warn(\"Unknown default tz type %s\" % (net_type))", - "docstring": "Deconfigure any additional default transport zone bindings." 
- }, - { - "code": "def _parse_local_mount_uri(self, raw_uri):\n raw_uri = directory_fmt(raw_uri)\n _, docker_path = _local_uri_rewriter(raw_uri)\n local_path = docker_path[len('file'):]\n docker_uri = os.path.join(self._relative_path, docker_path)\n return local_path, docker_uri", - "docstring": "Return a valid docker_path for a local file path." - }, - { - "code": "def _categoryToLabelList(self, category):\n if category is None:\n return []\n labelList = []\n labelNum = 0\n while category > 0:\n if category % 2 == 1:\n labelList.append(self.saved_categories[labelNum])\n labelNum += 1\n category = category >> 1\n return labelList", - "docstring": "Converts a category number into a list of labels" - }, - { - "code": "def run(self):\n print \"-\" * 80 + \"Computing the SDR\" + \"-\" * 80\n self.sp.compute(self.inputArray, True, self.activeArray)\n print self.activeArray.nonzero()", - "docstring": "Run the spatial pooler with the input vector" - }, - { - "code": "def geo(lat, lon, radius, unit='km'):\n return GeoValue(lat, lon, radius, unit)", - "docstring": "Indicate that value is a geo region" - }, - { - "code": "def create(self, server):\n return server.post(\n 'challenge_admin',\n self.as_payload(),\n replacements={'slug': self.slug})", - "docstring": "Create the challenge on the server" - }, - { - "code": "def int2fin_reference(n):\n checksum = 10 - (sum([int(c) * i for c, i in zip(str(n)[::-1], it.cycle((7, 3, 1)))]) % 10)\n if checksum == 10:\n checksum = 0\n return \"%s%s\" % (n, checksum)", - "docstring": "Calculates a checksum for a Finnish national reference number" - }, - { - "code": "def _parse_error(self, err):\r\n self.logger.debug(err)\r\n stack = err.get('stack', [])\r\n if not err['message'].startswith('parse error:'):\r\n err['message'] = 'error: ' + err['message']\r\n errmsg = 'Octave evaluation error:\\n%s' % err['message']\r\n if not isinstance(stack, StructArray):\r\n return errmsg\r\n errmsg += '\\nerror: called from:'\r\n for item in stack[:-1]:\r\n errmsg += '\\n %(name)s at line %(line)d' % item\r\n try:\r\n errmsg += ', column %(column)d' % item\r\n except Exception:\r\n pass\r\n return errmsg", - "docstring": "Create a traceback for an Octave evaluation error." - }, - { - "code": "def add_tag(self, tag):\n self.tags = list(set(self.tags or []) | set([tag]))", - "docstring": "Adds a tag to the list of tags and makes sure the result list contains only unique results." - }, - { - "code": "def _review_all(self, launchers):\n if self.launch_args is not None:\n proceed = self.review_args(self.launch_args,\n show_repr=True,\n heading='Meta Arguments')\n if not proceed: return False\n reviewers = [self.review_args,\n self.review_command,\n self.review_launcher]\n for (count, launcher) in enumerate(launchers):\n if not all(reviewer(launcher) for reviewer in reviewers):\n print(\"\\n == Aborting launch ==\")\n return False\n if len(launchers)!= 1 and count < len(launchers)-1:\n skip_remaining = self.input_options(['Y', 'n','quit'],\n '\\nSkip remaining reviews?', default='y')\n if skip_remaining == 'y': break\n elif skip_remaining == 'quit': return False\n if self.input_options(['y','N'], 'Execute?', default='n') != 'y':\n return False\n else:\n return self._launch_all(launchers)", - "docstring": "Runs the review process for all the launchers." 
- }, - { - "code": "def lookup_document_pointer(ident_hash, cursor):\n id, version = split_ident_hash(ident_hash, split_version=True)\n stmt = \"SELECT name FROM modules WHERE uuid = %s\"\n args = [id]\n if version and version[0] is not None:\n operator = version[1] is None and 'is' or '='\n stmt += \" AND (major_version = %s AND minor_version {} %s)\" \\\n .format(operator)\n args.extend(version)\n cursor.execute(stmt, args)\n try:\n title = cursor.fetchone()[0]\n except TypeError:\n raise DocumentLookupError()\n else:\n metadata = {'title': title}\n return cnxepub.DocumentPointer(ident_hash, metadata)", - "docstring": "Lookup a document by id and version." - }, - { - "code": "async def get_conversation(self, get_conversation_request):\n response = hangouts_pb2.GetConversationResponse()\n await self._pb_request('conversations/getconversation',\n get_conversation_request, response)\n return response", - "docstring": "Return conversation info and recent events." - }, - { - "code": "def t_tabbedheredoc(self, t):\n r'<<-\\S+\\r?\\n'\n t.lexer.is_tabbed = True\n self._init_heredoc(t)\n t.lexer.begin('tabbedheredoc')", - "docstring": "r'<<-\\S+\\r?\\n" - }, - { - "code": "def conference_undeaf(self, call_params):\n path = '/' + self.api_version + '/ConferenceUndeaf/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Conference Undeaf helper" - }, - { - "code": "def create_payload(self, x86_file, x64_file, payload_file):\n sc_x86 = open(os.path.join(self.datadir, x86_file), 'rb').read()\n sc_x64 = open(os.path.join(self.datadir, x64_file), 'rb').read()\n fp = open(os.path.join(self.datadir, payload_file), 'wb')\n fp.write(b'\\x31\\xc0\\x40\\x0f\\x84' + pack(' Unicorn" - }, - { - "code": "def execute(helper, config, args):\n environment_name = args.environment\n (events, next_token) = helper.describe_events(environment_name, start_time=datetime.now().isoformat())\n for event in events:\n print((\"[\"+event['Severity']+\"] \"+event['Message']))", - "docstring": "Describes recent events for an environment." - }, - { - "code": "def start(self):\n self.bot_start_time = datetime.now()\n self.webserver = Webserver(self.config['webserver']['host'], self.config['webserver']['port'])\n self.plugins.load()\n self.plugins.load_state()\n self._find_event_handlers()\n self.sc = ThreadedSlackClient(self.config['slack_token'])\n self.always_send_dm = ['_unauthorized_']\n if 'always_send_dm' in self.config:\n self.always_send_dm.extend(map(lambda x: '!' + x, self.config['always_send_dm']))\n logging.getLogger('Rocket.Errors.ThreadPool').setLevel(logging.INFO)\n self.is_setup = True\n if self.test_mode:\n self.metrics['startup_time'] = (datetime.now() - self.bot_start_time).total_seconds() * 1000.0", - "docstring": "Initializes the bot, plugins, and everything." - }, - { - "code": "def model_to_data(self, sigma=0.0):\n im = self.model.copy()\n im += sigma*np.random.randn(*im.shape)\n self.set_image(util.NullImage(image=im))", - "docstring": "Switch out the data for the model's recreation of the data." - }, - { - "code": "def create_switch(apps, schema_editor):\n Switch = apps.get_model('waffle', 'Switch')\n Switch.objects.get_or_create(name='SAP_USE_ENTERPRISE_ENROLLMENT_PAGE', defaults={'active': False})", - "docstring": "Create and activate the SAP_USE_ENTERPRISE_ENROLLMENT_PAGE switch if it does not already exist." 
- }, - { - "code": "def db_connect(connection_string=None, **kwargs):\n if connection_string is None:\n connection_string = get_current_registry().settings[CONNECTION_STRING]\n db_conn = psycopg2.connect(connection_string, **kwargs)\n try:\n with db_conn:\n yield db_conn\n finally:\n db_conn.close()", - "docstring": "Function to supply a database connection object." - }, - { - "code": "def _load_cpp4(self, filename):\n ccp4 = CCP4.CCP4()\n ccp4.read(filename)\n grid, edges = ccp4.histogramdd()\n self.__init__(grid=grid, edges=edges, metadata=self.metadata)", - "docstring": "Initializes Grid from a CCP4 file." - }, - { - "code": "def intersection(l1, l2):\n if len(l1) == 0 or len(l2) == 0:\n return []\n out = []\n l2_pos = 0\n for l in l1:\n while l2_pos < len(l2) and l2[l2_pos].end < l.start:\n l2_pos += 1\n if l2_pos == len(l2):\n break\n while l2_pos < len(l2) and l.intersects(l2[l2_pos]):\n out.append(l.intersection(l2[l2_pos]))\n l2_pos += 1\n l2_pos = max(0, l2_pos - 1)\n return out", - "docstring": "Returns intersection of two lists. Assumes the lists are sorted by start positions" - }, - { - "code": "def annotate_metadata_action(repo):\n package = repo.package \n print(\"Including history of actions\")\n with cd(repo.rootdir): \n filename = \".dgit/log.json\" \n if os.path.exists(filename): \n history = open(filename).readlines() \n actions = []\n for a in history: \n try: \n a = json.loads(a)\n for x in ['code']: \n if x not in a or a[x] == None: \n a[x] = \"...\"\n actions.append(a)\n except:\n pass \n package['actions'] = actions", - "docstring": "Update metadata with the action history" - }, - { - "code": "def last_modified_version(self, **kwargs):\n self.items(**kwargs)\n return int(self.request.headers.get(\"last-modified-version\", 0))", - "docstring": "Get the last modified version" - }, - { - "code": "def fail(message, exitcode=1):\n sys.stderr.write('ERROR: {}\\n'.format(message))\n sys.stderr.flush()\n sys.exit(exitcode)", - "docstring": "Exit with error code and message." - }, - { - "code": "def _sample_with_priority(self, p):\n parent = 0\n while True:\n left = 2 * parent + 1\n if left >= len(self._memory):\n return parent\n left_p = self._memory[left] if left < self._capacity - 1 \\\n else (self._memory[left].priority or 0)\n if p <= left_p:\n parent = left\n else:\n if left + 1 >= len(self._memory):\n raise RuntimeError('Right child is expected to exist.')\n p -= left_p\n parent = left + 1", - "docstring": "Sample random element with priority greater than p." - }, - { - "code": "def report(self, obj, message, linenum, char_offset=0):\n self.controller.report(linenumber=linenum, filename=obj.path,\n severity=self.severity, message=message,\n rulename = self.__class__.__name__,\n char=char_offset)", - "docstring": "Report an error or warning" - }, - { - "code": "def copyto(self,\n new_abspath=None,\n new_dirpath=None,\n new_dirname=None,\n new_basename=None,\n new_fname=None,\n new_ext=None,\n overwrite=False,\n makedirs=False):\n self.assert_exists()\n p = self.change(\n new_abspath=new_abspath,\n new_dirpath=new_dirpath,\n new_dirname=new_dirname,\n new_basename=new_basename,\n new_fname=new_fname,\n new_ext=new_ext,\n )\n if p.is_not_exist_or_allow_overwrite(overwrite=overwrite):\n if self.abspath != p.abspath:\n try:\n shutil.copy(self.abspath, p.abspath)\n except IOError as e:\n if makedirs:\n os.makedirs(p.parent.abspath)\n shutil.copy(self.abspath, p.abspath)\n else:\n raise e\n return p", - "docstring": "Copy this file to other place." 
- }, - { - "code": "def handle_add_fun(self, function_name):\n function_name = function_name.strip()\n try:\n function = get_function(function_name)\n except Exception, exc:\n self.wfile.write(js_error(exc) + NEWLINE)\n return\n if not getattr(function, 'view_decorated', None):\n self.functions[function_name] = (self.function_counter, function)\n else:\n self.functions[function_name] = (self.function_counter,\n function(self.log))\n self.function_counter += 1\n return True", - "docstring": "Add a function to the function list, in order." - }, - { - "code": "def does_not_contain_value(self, *values):\n self._check_dict_like(self.val, check_getitem=False)\n if len(values) == 0:\n raise ValueError('one or more value args must be given')\n else:\n found = []\n for v in values:\n if v in self.val.values():\n found.append(v)\n if found:\n self._err('Expected <%s> to not contain values %s, but did contain %s.' % (self.val, self._fmt_items(values), self._fmt_items(found)))\n return self", - "docstring": "Asserts that val is a dict and does not contain the given value or values." - }, - { - "code": "def AIMAFile(components, mode='r'):\n \"Open a file based at the AIMA root directory.\"\n import utils\n dir = os.path.dirname(utils.__file__)\n return open(apply(os.path.join, [dir] + components), mode)", - "docstring": "Open a file based at the AIMA root directory." - }, - { - "code": "def iter_space_block(self, text=None, width=60, fmtfunc=str):\n if width < 1:\n width = 1\n curline = ''\n text = (self.text if text is None else text) or ''\n for word in text.split():\n possibleline = ' '.join((curline, word)) if curline else word\n codelen = sum(len(s) for s in get_codes(possibleline))\n reallen = len(possibleline) - codelen\n if reallen > width:\n yield fmtfunc(curline)\n curline = word\n else:\n curline = possibleline\n if curline:\n yield fmtfunc(curline)", - "docstring": "Format block by wrapping on spaces." - }, - { - "code": "def state_size(self):\n return (LSTMStateTuple(self._num_units, self._num_units) if self._state_is_tuple else 2 * self._num_units)", - "docstring": "State size of the LSTMStateTuple." - }, - { - "code": "def iconcat(a, b):\n \"Same as a += b, for a and b sequences.\"\n if not hasattr(a, '__getitem__'):\n msg = \"'%s' object can't be concatenated\" % type(a).__name__\n raise TypeError(msg)\n a += b\n return a", - "docstring": "Same as a += b, for a and b sequences." - }, - { - "code": "def getSpec(cls):\n spec = {\n \"description\":IdentityRegion.__doc__,\n \"singleNodeOnly\":True,\n \"inputs\":{\n \"in\":{\n \"description\":\"The input vector.\",\n \"dataType\":\"Real32\",\n \"count\":0,\n \"required\":True,\n \"regionLevel\":False,\n \"isDefaultInput\":True,\n \"requireSplitterMap\":False},\n },\n \"outputs\":{\n \"out\":{\n \"description\":\"A copy of the input vector.\",\n \"dataType\":\"Real32\",\n \"count\":0,\n \"regionLevel\":True,\n \"isDefaultOutput\":True},\n },\n \"parameters\":{\n \"dataWidth\":{\n \"description\":\"Size of inputs\",\n \"accessMode\":\"Read\",\n \"dataType\":\"UInt32\",\n \"count\":1,\n \"constraints\":\"\"},\n },\n }\n return spec", - "docstring": "Return the Spec for IdentityRegion." - }, - { - "code": "def to_int64(a):\n def promote_i4(typestr):\n if typestr[1:] == 'i4':\n typestr = typestr[0]+'i8'\n return typestr\n dtype = [(name, promote_i4(typestr)) for name,typestr in a.dtype.descr]\n return a.astype(dtype)", - "docstring": "Return view of the recarray with all int32 cast to int64." 
- }, - { - "code": "def unexpo(intpart, fraction, expo):\n if expo > 0:\n f = len(fraction)\n intpart, fraction = intpart + fraction[:expo], fraction[expo:]\n if expo > f:\n intpart = intpart + '0'*(expo-f)\n elif expo < 0:\n i = len(intpart)\n intpart, fraction = intpart[:expo], intpart[expo:] + fraction\n if expo < -i:\n fraction = '0'*(-expo-i) + fraction\n return intpart, fraction", - "docstring": "Remove the exponent by changing intpart and fraction." - }, - { - "code": "def generate_media_pages(gallery):\n writer = PageWriter(gallery.settings, index_title=gallery.title)\n for album in gallery.albums.values():\n medias = album.medias\n next_medias = medias[1:] + [None]\n previous_medias = [None] + medias[:-1]\n media_groups = zip(medias, next_medias, previous_medias)\n for media_group in media_groups:\n writer.write(album, media_group)", - "docstring": "Generates and writes the media pages for all media in the gallery" - }, - { - "code": "def to_table(result):\n max_count = 20\n table, count = [], 0\n for role, envs_topos in result.items():\n for env, topos in envs_topos.items():\n for topo in topos:\n count += 1\n if count > max_count:\n continue\n else:\n table.append([role, env, topo])\n header = ['role', 'env', 'topology']\n rest_count = 0 if count <= max_count else count - max_count\n return table, header, rest_count", - "docstring": "normalize raw result to table" - }, - { - "code": "def env():\n if cij.ssh.env():\n cij.err(\"cij.lnvm.env: invalid SSH environment\")\n return 1\n lnvm = cij.env_to_dict(PREFIX, REQUIRED)\n nvme = cij.env_to_dict(\"NVME\", [\"DEV_NAME\"])\n if \"BGN\" not in lnvm.keys():\n cij.err(\"cij.lnvm.env: invalid LNVM_BGN\")\n return 1\n if \"END\" not in lnvm.keys():\n cij.err(\"cij.lnvm.env: invalid LNVM_END\")\n return 1\n if \"DEV_TYPE\" not in lnvm.keys():\n cij.err(\"cij.lnvm.env: invalid LNVM_DEV_TYPE\")\n return 1\n lnvm[\"DEV_NAME\"] = \"%sb%03de%03d\" % (nvme[\"DEV_NAME\"], int(lnvm[\"BGN\"]), int(lnvm[\"END\"]))\n lnvm[\"DEV_PATH\"] = \"/dev/%s\" % lnvm[\"DEV_NAME\"]\n cij.env_export(PREFIX, EXPORTED, lnvm)\n return 0", - "docstring": "Verify LNVM variables and construct exported variables" - }, - { - "code": "def require(self, req):\n reqs = req if isinstance(req, list) else [req]\n for req in reqs:\n if not isinstance(req, BumpRequirement):\n req = BumpRequirement(req)\n req.required = True\n req.required_by = self\n self.requirements.append(req)", - "docstring": "Add new requirements that must be fulfilled for this bump to occur" - }, - { - "code": "def FindNextMultiLineCommentEnd(lines, lineix):\n while lineix < len(lines):\n if lines[lineix].strip().endswith('*/'):\n return lineix\n lineix += 1\n return len(lines)", - "docstring": "We are inside a comment, find the end marker." - }, - { - "code": "def connect(self):\n try:\n self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self._socket.settimeout(TIMEOUT_SECONDS)\n self._socket.connect((self._ip, self._port))\n _LOGGER.debug(\"Successfully created Hub at %s:%s :)\", self._ip,\n self._port)\n except socket.error as error:\n _LOGGER.error(\"Error creating Hub: %s :(\", error)\n self._socket.close()", - "docstring": "Create and connect to socket for TCP communication with hub." 
- }, - { - "code": "def db_value(self, value):\n if not isinstance(value, UUID):\n value = UUID(value)\n parts = str(value).split(\"-\")\n reordered = ''.join([parts[2], parts[1], parts[0], parts[3], parts[4]])\n value = binascii.unhexlify(reordered)\n return super(OrderedUUIDField, self).db_value(value)", - "docstring": "Convert UUID to binary blob" - }, - { - "code": "def _nginx_stream_spec(port_spec, bridge_ip):\n server_string_spec = \"\\t server {\\n\"\n server_string_spec += \"\\t \\t {}\\n\".format(_nginx_listen_string(port_spec))\n server_string_spec += \"\\t \\t {}\\n\".format(_nginx_proxy_string(port_spec, bridge_ip))\n server_string_spec += \"\\t }\\n\"\n return server_string_spec", - "docstring": "This will output the nginx stream config string for specific port spec" - }, - { - "code": "def fmt(a, b):\n return 100 * np.min([a, b], axis=0).sum() / np.max([a, b], axis=0).sum()", - "docstring": "Figure of merit in time" - }, - { - "code": "def apt_key_exists(keyid):\n gpg_cmd = 'gpg --ignore-time-conflict --no-options --no-default-keyring --keyring /etc/apt/trusted.gpg'\n with settings(hide('everything'), warn_only=True):\n res = run('%(gpg_cmd)s --fingerprint %(keyid)s' % locals())\n return res.succeeded", - "docstring": "Check if the given key id exists in apt keyring." - }, - { - "code": "def load_gltf(self):\n with open(self.path) as fd:\n self.meta = GLTFMeta(self.path, json.load(fd))", - "docstring": "Loads a gltf json file" - }, - { - "code": "def _step2func(self, samples, force, ipyclient):\n if self._headers:\n print(\"\\n Step 2: Filtering reads \")\n if not self.samples.keys():\n raise IPyradWarningExit(FIRST_RUN_1)\n samples = _get_samples(self, samples)\n if not force:\n if all([i.stats.state >= 2 for i in samples]):\n print(EDITS_EXIST.format(len(samples)))\n return\n assemble.rawedit.run2(self, samples, force, ipyclient)", - "docstring": "hidden wrapped function to start step 2" - }, - { - "code": "def _ready(self):\n if self._has_state(self.COMPLETED) or self._has_state(self.CANCELLED):\n return\n self._set_state(self.READY)\n self.task_spec._on_ready(self)", - "docstring": "Marks the task as ready for execution." - }, - { - "code": "def get(self, request, response):\n self.assert_operations('read')\n items = self.read()\n if not items:\n raise http.exceptions.NotFound()\n if (isinstance(items, Iterable)\n and not isinstance(items, six.string_types)) and items:\n items = pagination.paginate(self.request, self.response, items)\n self.make_response(items)", - "docstring": "Processes a `GET` request." - }, - { - "code": "def _get_results(self, identity_provider, param_name, param_value, result_field_name):\n try:\n kwargs = {param_name: param_value}\n returned = self.client.providers(identity_provider).users.get(**kwargs)\n results = returned.get('results', [])\n except HttpNotFoundError:\n LOGGER.error(\n 'username not found for third party provider={provider}, {querystring_param}={id}'.format(\n provider=identity_provider,\n querystring_param=param_name,\n id=param_value\n )\n )\n results = []\n for row in results:\n if row.get(param_name) == param_value:\n return row.get(result_field_name)\n return None", - "docstring": "Calls the third party auth api endpoint to get the mapping between usernames and remote ids." 
- }, - { - "code": "def delete_acl_request(request):\n uuid_ = request.matchdict['uuid']\n posted = request.json\n permissions = [(x['uid'], x['permission'],) for x in posted]\n with db_connect() as db_conn:\n with db_conn.cursor() as cursor:\n remove_acl(cursor, uuid_, permissions)\n resp = request.response\n resp.status_int = 200\n return resp", - "docstring": "Submission to remove an ACL." - }, - { - "code": "def _findAll(self, name, attrs, text, limit, generator, **kwargs):\n \"Iterates over a generator looking for things that match.\"\n if isinstance(name, SoupStrainer):\n strainer = name\n else:\n strainer = SoupStrainer(name, attrs, text, **kwargs)\n results = ResultSet(strainer)\n g = generator()\n while True:\n try:\n i = g.next()\n except StopIteration:\n break\n if i:\n found = strainer.search(i)\n if found:\n results.append(found)\n if limit and len(results) >= limit:\n break\n return results", - "docstring": "Iterates over a generator looking for things that match." - }, - { - "code": "def _generateFind(self, **kwargs):\n for needle in self._generateChildren():\n if needle._match(**kwargs):\n yield needle", - "docstring": "Generator which yields matches on AXChildren." - }, - { - "code": "def comments_load(self):\n self.comment_times,self.comment_sweeps,self.comment_tags=[],[],[]\n self.comments=0\n self.comment_text=\"\"\n try:\n self.comment_tags = list(self.ABFblock.segments[0].eventarrays[0].annotations['comments'])\n self.comment_times = list(self.ABFblock.segments[0].eventarrays[0].times/self.trace.itemsize)\n self.comment_sweeps = list(self.comment_times)\n except:\n for events in self.ABFblock.segments[0].events:\n self.comment_tags = events.annotations['comments'].tolist()\n self.comment_times = np.array(events.times.magnitude/self.trace.itemsize)\n self.comment_sweeps = self.comment_times/self.sweepInterval\n for i,c in enumerate(self.comment_tags):\n self.comment_tags[i]=c.decode(\"utf-8\")", - "docstring": "read the header and populate self with information about comments" - }, - { - "code": "def _add_option(self, option):\n if option.name in self.options:\n raise ValueError('name already in use')\n if option.abbreviation in self.abbreviations:\n raise ValueError('abbreviation already in use')\n if option.name in [arg.name for arg in self.positional_args]:\n raise ValueError('name already in use by a positional argument')\n self.options[option.name] = option\n if option.abbreviation:\n self.abbreviations[option.abbreviation] = option\n self.option_order.append(option.name)", - "docstring": "Add an Option object to the user interface." - }, - { - "code": "def to_str(prev, encoding=None):\n first = next(prev)\n if isinstance(first, str):\n if encoding is None:\n yield first\n for s in prev:\n yield s\n else:\n yield first.encode(encoding)\n for s in prev:\n yield s.encode(encoding)\n else:\n if encoding is None:\n encoding = sys.stdout.encoding or 'utf-8'\n yield first.decode(encoding)\n for s in prev:\n yield s.decode(encoding)", - "docstring": "Convert data from previous pipe with specified encoding." - }, - { - "code": "def _get_col_index(name):\n index = string.ascii_uppercase.index\n col = 0\n for c in name.upper():\n col = col * 26 + index(c) + 1\n return col", - "docstring": "Convert column name to index." 
- }, - { - "code": "def simulate_as_gaussian(cls, shape, pixel_scale, sigma, centre=(0.0, 0.0), axis_ratio=1.0, phi=0.0):\n from autolens.model.profiles.light_profiles import EllipticalGaussian\n gaussian = EllipticalGaussian(centre=centre, axis_ratio=axis_ratio, phi=phi, intensity=1.0, sigma=sigma)\n grid_1d = grid_util.regular_grid_1d_masked_from_mask_pixel_scales_and_origin(mask=np.full(shape, False),\n pixel_scales=(\n pixel_scale, pixel_scale))\n gaussian_1d = gaussian.intensities_from_grid(grid=grid_1d)\n gaussian_2d = mapping_util.map_unmasked_1d_array_to_2d_array_from_array_1d_and_shape(array_1d=gaussian_1d,\n shape=shape)\n return PSF(array=gaussian_2d, pixel_scale=pixel_scale, renormalize=True)", - "docstring": "Simulate the PSF as an elliptical Gaussian profile." - }, - { - "code": "def add(self, *entries):\n for entry in entries:\n if isinstance(entry, string_types):\n self._add_entries(database.parse_string(entry, bib_format='bibtex'))\n else:\n self._add_entries(entry)", - "docstring": "Add a source, either specified by glottolog reference id, or as bibtex record." - }, - { - "code": "def reaction_weight(reaction):\n if len(reaction.metabolites) != 1:\n raise ValueError('Reaction weight is only defined for single '\n 'metabolite products or educts.')\n met, coeff = next(iteritems(reaction.metabolites))\n return [coeff * met.formula_weight]", - "docstring": "Return the metabolite weight times its stoichiometric coefficient." - }, - { - "code": "def _get_catalysts_in_reaction(reaction: Reaction) -> Set[BaseAbundance]:\n return {\n reactant\n for reactant in reaction.reactants\n if reactant in reaction.products\n }", - "docstring": "Return nodes that are both in reactants and reactions in a reaction." - }, - { - "code": "def remove_tweets(self, url):\n try:\n del self.cache[url]\n self.mark_updated()\n return True\n except KeyError:\n return False", - "docstring": "Tries to remove cached tweets." - }, - { - "code": "def id_to_object(self, line):\n user = User.get(line, ignore=404)\n if not user:\n user = User(username=line)\n user.save()\n return user", - "docstring": "Resolves the given id to a user object, if it doesn't exists it will be created." 
- }, - { - "code": "def _decrypt(self, hexified_value):\n encrypted_value = binascii.unhexlify(hexified_value)\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n jsonified_value = self.cipher.decrypt(\n encrypted_value).decode('ascii')\n value = json.loads(jsonified_value)\n return value", - "docstring": "The exact opposite of _encrypt" - }, - { - "code": "def print_cluster_info(cl_args):\n parsed_roles = read_and_parse_roles(cl_args)\n masters = list(parsed_roles[Role.MASTERS])\n slaves = list(parsed_roles[Role.SLAVES])\n zookeepers = list(parsed_roles[Role.ZOOKEEPERS])\n cluster = list(parsed_roles[Role.CLUSTER])\n info = OrderedDict()\n info['numNodes'] = len(cluster)\n info['nodes'] = cluster\n roles = OrderedDict()\n roles['masters'] = masters\n roles['slaves'] = slaves\n roles['zookeepers'] = zookeepers\n urls = OrderedDict()\n urls['serviceUrl'] = get_service_url(cl_args)\n urls['heronUi'] = get_heron_ui_url(cl_args)\n urls['heronTracker'] = get_heron_tracker_url(cl_args)\n info['roles'] = roles\n info['urls'] = urls\n print json.dumps(info, indent=2)", - "docstring": "get cluster info for standalone cluster" - }, - { - "code": "async def disconnect(self, code):\n try:\n await asyncio.wait(\n self.application_futures.values(),\n return_when=asyncio.ALL_COMPLETED,\n timeout=self.application_close_timeout\n )\n except asyncio.TimeoutError:\n pass", - "docstring": "default is to wait for the child applications to close." - }, - { - "code": "def SetWindowText(self, text: str) -> bool:\n handle = self.NativeWindowHandle\n if handle:\n return SetWindowText(handle, text)\n return False", - "docstring": "Call native SetWindowText if control has a valid native handle." - }, - { - "code": "def tradepile(self):\n method = 'GET'\n url = 'tradepile'\n rc = self.__request__(method, url)\n events = [self.pin.event('page_view', 'Hub - Transfers'), self.pin.event('page_view', 'Transfer List - List View')]\n if rc.get('auctionInfo'):\n events.append(self.pin.event('page_view', 'Item - Detail View'))\n self.pin.send(events)\n return [itemParse(i) for i in rc.get('auctionInfo', ())]", - "docstring": "Return items in tradepile." - }, - { - "code": "def construct_inlines(self):\n inline_formsets = []\n for inline_class in self.get_inlines():\n inline_instance = inline_class(self.model, self.request, self.object, self.kwargs, self)\n inline_formset = inline_instance.construct_formset()\n inline_formsets.append(inline_formset)\n return inline_formsets", - "docstring": "Returns the inline formset instances" - }, - { - "code": "def _show_warning(message, category, filename, lineno, file=None, line=None):\n if file is None:\n file = sys.stderr\n if file is None:\n return\n try:\n file.write(formatwarning(message, category, filename, lineno, line))\n except (IOError, UnicodeError):\n pass", - "docstring": "Hook to write a warning to a file; replace if you like." - }, - { - "code": "def _rows(self, spec):\n rows = self.new_row_collection()\n for row in spec:\n rows.append(self._row(row))\n return rows", - "docstring": "Parse a collection of rows." - }, - { - "code": "def computeGaussKernel(x):\n xnorm = np.power(euclidean_distances(x, x), 2)\n return np.exp(-xnorm / (2.0))", - "docstring": "Compute the gaussian kernel on a 1D vector." - }, - { - "code": "def _load_plt(self, filename):\n g = gOpenMol.Plt()\n g.read(filename)\n grid, edges = g.histogramdd()\n self.__init__(grid=grid, edges=edges, metadata=self.metadata)", - "docstring": "Initialize Grid from gOpenMol plt file." 
- }, - { - "code": "def dump(self):\n for table in self.tables:\n print(\"*** %s ***\" % table.name)\n table.dump()", - "docstring": "Regurgitate the tables and rows" - }, - { - "code": "def send_stream_tail(self):\n with self.lock:\n if not self._socket or self._hup:\n logger.debug(u\"Cannot send stream closing tag: already closed\")\n return\n data = self._serializer.emit_tail()\n try:\n self._write(data.encode(\"utf-8\"))\n except (IOError, SystemError, socket.error), err:\n logger.debug(u\"Sending stream closing tag failed: {0}\"\n .format(err))\n self._serializer = None\n self._hup = True\n if self._tls_state is None:\n try:\n self._socket.shutdown(socket.SHUT_WR)\n except socket.error:\n pass\n self._set_state(\"closing\")\n self._write_queue.clear()\n self._write_queue_cond.notify()", - "docstring": "Send stream tail via the transport." - }, - { - "code": "def hexDump(bytes):\n for i in range(len(bytes)):\n sys.stdout.write(\"%2x \" % (ord(bytes[i])))\n if (i+1) % 8 == 0:\n print repr(bytes[i-7:i+1])\n if(len(bytes) % 8 != 0):\n print string.rjust(\"\", 11), repr(bytes[i-len(bytes)%8:i+1])", - "docstring": "Useful utility; prints the string in hexadecimal" - }, - { - "code": "def connect(self):\n self._ftp.connect()\n self._ftp.login(user=self._username, passwd=self._passwd)", - "docstring": "Connects and logins to the server." - }, - { - "code": "def _get_param_names(cls):\n init = cls.__init__\n args, varargs = inspect.getargspec(init)[:2]\n if varargs is not None:\n raise RuntimeError('BaseTransformer objects cannot have varargs')\n args.pop(0)\n args.sort()\n return args", - "docstring": "Get the list of parameter names for the object" - }, - { - "code": "def node_has_namespace(node: BaseEntity, namespace: str) -> bool:\n ns = node.get(NAMESPACE)\n return ns is not None and ns == namespace", - "docstring": "Pass for nodes that have the given namespace." - }, - { - "code": "async def set_qtm_event(self, event=None):\n cmd = \"event%s\" % (\"\" if event is None else \" \" + event)\n return await asyncio.wait_for(\n self._protocol.send_command(cmd), timeout=self._timeout\n )", - "docstring": "Set event in QTM." - }, - { - "code": "def display(self):\n if not self.is_group():\n return self._display\n return ((force_text(k), v) for k, v in self._display)", - "docstring": "When dealing with optgroups, ensure that the value is properly force_text'd." - }, - { - "code": "def draw(self):\n self.ax.set_xlim(-self.plot_radius(), self.plot_radius())\n self.ax.set_ylim(-self.plot_radius(), self.plot_radius())\n self.add_axes_and_nodes()\n self.add_edges()\n self.ax.axis('off')", - "docstring": "The master function that is called that draws everything." - }, - { - "code": "def _hashable_bytes(data):\n if isinstance(data, bytes):\n return data\n elif isinstance(data, str):\n return data.encode('ascii')\n else:\n raise TypeError(data)", - "docstring": "Coerce strings to hashable bytes." 
- }, - { - "code": "def sameTMParams(tp1, tp2):\n result = True\n for param in [\"numberOfCols\", \"cellsPerColumn\", \"initialPerm\", \"connectedPerm\",\n \"minThreshold\", \"newSynapseCount\", \"permanenceInc\", \"permanenceDec\",\n \"permanenceMax\", \"globalDecay\", \"activationThreshold\",\n \"doPooling\", \"segUpdateValidDuration\",\n \"burnIn\", \"pamLength\", \"maxAge\"]:\n if getattr(tp1, param) != getattr(tp2,param):\n print param,\"is different\"\n print getattr(tp1, param), \"vs\", getattr(tp2,param)\n result = False\n return result", - "docstring": "Given two TM instances, see if any parameters are different." - }, - { - "code": "def linspace(self, start, stop, n):\n if n == 1: return [start]\n L = [0.0] * n\n nm1 = n - 1\n nm1inv = 1.0 / nm1\n for i in range(n):\n L[i] = nm1inv * (start*(nm1 - i) + stop*i)\n return L", - "docstring": "Simple replacement for numpy linspace" - }, - { - "code": "def file_remove(self, path):\n log.info('Remove '+path)\n cmd = 'file.remove(\"%s\")' % path\n res = self.__exchange(cmd)\n log.info(res)\n return res", - "docstring": "Removes a file on the device" - }, - { - "code": "def formatter(color, s):\n if no_coloring:\n return s\n return \"{begin}{s}{reset}\".format(begin=color, s=s, reset=Colors.RESET)", - "docstring": "Formats a string with color" - }, - { - "code": "def as_dict(self, default=None):\n settings = SettingDict(queryset=self, default=default)\n return settings", - "docstring": "Returns a ``SettingDict`` object for this queryset." - }, - { - "code": "def check_pidfile(pidfile, debug):\n if os.path.isfile(pidfile):\n pidfile_handle = open(pidfile, 'r')\n try:\n pid = int(pidfile_handle.read())\n pidfile_handle.close()\n if check_pid(pid, debug):\n return True\n except:\n pass\n os.unlink(pidfile)\n pid = str(os.getpid())\n open(pidfile, 'w').write(pid)\n return False", - "docstring": "Check that a process is not running more than once, using PIDFILE" - }, - { - "code": "def _rollback(self):\n last_pc, last_gas, last_instruction, last_arguments, fee, allocated = self._checkpoint_data\n self._push_arguments(last_arguments)\n self._gas = last_gas\n self._pc = last_pc\n self._allocated = allocated\n self._checkpoint_data = None", - "docstring": "Revert the stack, gas, pc and memory allocation so it looks like before executing the instruction" - }, - { - "code": "def _get_oldest_event_timestamp(self):\n query_events = Search(\n using=self.client,\n index=self.event_index\n )[0:1].sort(\n {'timestamp': {'order': 'asc'}}\n )\n result = query_events.execute()\n if len(result) == 0:\n return None\n return parser.parse(result[0]['timestamp'])", - "docstring": "Search for the oldest event timestamp." - }, - { - "code": "def blend(self, clr, factor=0.5):\n r = self.r * (1 - factor) + clr.r * factor\n g = self.g * (1 - factor) + clr.g * factor\n b = self.b * (1 - factor) + clr.b * factor\n a = self.a * (1 - factor) + clr.a * factor\n return Color(r, g, b, a, mode=\"rgb\")", - "docstring": "Returns a mix of two colors." - }, - { - "code": "def get(vals, key, default_val=None):\n val = vals\n for part in key.split('.'):\n if isinstance(val, dict):\n val = val.get(part, None)\n if val is None:\n return default_val\n else:\n return default_val\n return val", - "docstring": "Returns a dictionary value" - }, - { - "code": "def format_pairs(self, values):\n return ', '.join(\n '%s=%s' % (key, value) for key, value in sorted(values.items()))", - "docstring": "Returns a string of comma-delimited key=value pairs." 
- }, - { - "code": "def dump(self, itemkey, filename=None, path=None):\n if not filename:\n filename = self.item(itemkey)[\"data\"][\"filename\"]\n if path:\n pth = os.path.join(path, filename)\n else:\n pth = filename\n file = self.file(itemkey)\n if self.snapshot:\n self.snapshot = False\n pth = pth + \".zip\"\n with open(pth, \"wb\") as f:\n f.write(file)", - "docstring": "Dump a file attachment to disk, with optional filename and path" - }, - { - "code": "def graph_evaluation(data, adj_matrix, gpu=None, gpu_id=0, **kwargs):\n gpu = SETTINGS.get_default(gpu=gpu)\n device = 'cuda:{}'.format(gpu_id) if gpu else 'cpu'\n obs = th.FloatTensor(data).to(device)\n cgnn = CGNN_model(adj_matrix, data.shape[0], gpu_id=gpu_id, **kwargs).to(device)\n cgnn.reset_parameters()\n return cgnn.run(obs, **kwargs)", - "docstring": "Evaluate a graph taking account of the hardware." - }, - { - "code": "def compartments(self):\n if self._compartments is None:\n self._compartments = {met.compartment for met in self._metabolites\n if met.compartment is not None}\n return self._compartments", - "docstring": "lists compartments the metabolites are in" - }, - { - "code": "def goto_assignments(request_data):\n code = request_data['code']\n line = request_data['line'] + 1\n column = request_data['column']\n path = request_data['path']\n encoding = 'utf-8'\n script = jedi.Script(code, line, column, path, encoding)\n try:\n definitions = script.goto_assignments()\n except jedi.NotFoundError:\n pass\n else:\n ret_val = [(d.module_path, d.line - 1 if d.line else None,\n d.column, d.full_name)\n for d in definitions]\n return ret_val", - "docstring": "Go to assignements worker." - }, - { - "code": "def _collapse_outgroup(tree, taxdicts):\n outg = taxdicts[0][\"p4\"]\n if not all([i[\"p4\"] == outg for i in taxdicts]):\n raise Exception(\"no good\")\n tre = ete.Tree(tree.write(format=1))\n alltax = [i for i in tre.get_leaf_names() if i not in outg]\n alltax += [outg[0]]\n tre.prune(alltax)\n tre.search_nodes(name=outg[0])[0].name = \"outgroup\"\n tre.ladderize()\n taxd = copy.deepcopy(taxdicts)\n newtaxdicts = []\n for test in taxd:\n test[\"p4\"] = [\"outgroup\"]\n newtaxdicts.append(test)\n return tre, newtaxdicts", - "docstring": "collapse outgroup in ete Tree for easier viewing" - }, - { - "code": "def main(target, label):\n check_environment(target, label)\n click.secho('Fetching tags from the upstream ...')\n handler = TagHandler(git.list_tags())\n print_information(handler, label)\n tag = handler.yield_tag(target, label)\n confirm(tag)", - "docstring": "Semver tag triggered deployment helper" - }, - { - "code": "def getAllRecords(self):\n values=[]\n numRecords = self.fields[0].numRecords\n assert (all(field.numRecords==numRecords for field in self.fields))\n for x in range(numRecords):\n values.append(self.getRecord(x))\n return values", - "docstring": "Returns all the records" - }, - { - "code": "def list(self,\n key_name=None,\n max_suggestions=100,\n cutoff=0.5,\n locked_only=False,\n key_type=None):\n self._assert_valid_stash()\n key_list = [k for k in self._storage.list()\n if k['name'] != 'stored_passphrase' and\n (k.get('lock') if locked_only else True)]\n if key_type:\n types = ('secret', None) if key_type == 'secret' else [key_type]\n key_list = [k for k in key_list if k.get('type') in types]\n key_list = [k['name'] for k in key_list]\n if key_name:\n if key_name.startswith('~'):\n key_list = difflib.get_close_matches(\n key_name.lstrip('~'), key_list, max_suggestions, cutoff)\n else:\n key_list = [k for k 
in key_list if key_name in k]\n audit(\n storage=self._storage.db_path,\n action='LIST' + ('[LOCKED]' if locked_only else ''),\n message=json.dumps(dict()))\n return key_list", - "docstring": "Return a list of all keys." - }, - { - "code": "def centralManager_didConnectPeripheral_(self, manager, peripheral):\n logger.debug('centralManager_didConnectPeripheral called')\n peripheral.setDelegate_(self)\n peripheral.discoverServices_(None)\n device = device_list().get(peripheral)\n if device is not None:\n device._set_connected()", - "docstring": "Called when a device is connected." - }, - { - "code": "def update_eig_J(self):\n CLOG.debug('Eigen update.')\n vls, vcs = np.linalg.eigh(self.JTJ)\n res0 = self.calc_residuals()\n for a in range(min([self.num_eig_dirs, vls.size])):\n stif_dir = vcs[-(a+1)]\n dl = self.eig_dl\n _ = self.update_function(self.param_vals + dl*stif_dir)\n res1 = self.calc_residuals()\n grad_stif = (res1-res0)/dl\n self._rank_1_J_update(stif_dir, grad_stif)\n self.JTJ = np.dot(self.J, self.J.T)\n _ = self.update_function(self.param_vals)", - "docstring": "Execute an eigen update of J" - }, - { - "code": "def _f_gene(sid, prefix=\"G_\"):\n sid = sid.replace(SBML_DOT, \".\")\n return _clip(sid, prefix)", - "docstring": "Clips gene prefix from id." - }, - { - "code": "def _getStateAnomalyVector(self, state):\n vector = numpy.zeros(self._anomalyVectorLength)\n vector[state.anomalyVector] = 1\n return vector", - "docstring": "Returns a state's anomaly vertor converting it from spare to dense" - }, - { - "code": "def InColorspace(to_colorspace, from_colorspace=\"RGB\", children=None, name=None, deterministic=False,\n random_state=None):\n return WithColorspace(to_colorspace, from_colorspace, children, name, deterministic, random_state)", - "docstring": "Convert images to another colorspace." - }, - { - "code": "def generate_context(self, album):\n from . import __url__ as sigal_link\n self.logger.info(\"Output album : %r\", album)\n return {\n 'album': album,\n 'index_title': self.index_title,\n 'settings': self.settings,\n 'sigal_link': sigal_link,\n 'theme': {'name': os.path.basename(self.theme),\n 'url': url_from_path(os.path.relpath(self.theme_path,\n album.dst_path))},\n }", - "docstring": "Generate the context dict for the given path." - }, - { - "code": "def centerdc_2_twosided(data):\n N = len(data)\n newpsd = np.concatenate((data[N//2:], (cshift(data[0:N//2], -1))))\n return newpsd", - "docstring": "Convert a center-dc PSD to a twosided PSD" - }, - { - "code": "def validate(cls, policy):\n return policy in [cls.PUBLIC, cls.MEMBERS, cls.ADMINS]", - "docstring": "Validate privacy policy value." - }, - { - "code": "def getInputNames(self):\n inputs = self.getSpec().inputs\n return [inputs.getByIndex(i)[0] for i in xrange(inputs.getCount())]", - "docstring": "Returns list of input names in spec." - }, - { - "code": "def _disconnect_user_post_save_for_migrations(self, sender, **kwargs):\n from django.db.models.signals import post_save\n post_save.disconnect(sender=self.auth_user_model, dispatch_uid=USER_POST_SAVE_DISPATCH_UID)", - "docstring": "Handle pre_migrate signal - disconnect User post_save handler." 
- }, - { - "code": "def _expand_produced_mesh(self, mesh, mesh_index, row_position, passed):\n if not mesh.is_consumed():\n return\n row = mesh.consuming_row\n position = Point(\n row_position.x - mesh.index_in_consuming_row + mesh_index,\n row_position.y + INSTRUCTION_HEIGHT\n )\n self._expand(row, position, passed)", - "docstring": "expand the produced meshes" - }, - { - "code": "def split_elements(value):\n items = [v.strip() for v in value.split(',')]\n if len(items) == 1:\n items = value.split()\n return items", - "docstring": "Split a string with comma or space-separated elements into a list." - }, - { - "code": "def memcopy(self, stream, offset=0, length=float(\"inf\")):\n data = [ord(i) for i in list(stream)]\n size = min(length, len(data), self.m_size)\n buff = cast(self.m_buf, POINTER(c_uint8))\n for i in range(size):\n buff[offset + i] = data[i]", - "docstring": "Copy stream to buffer" - }, - { - "code": "def _add_tc_script(self):\n context = dict(tc_options=self.config.get('tc_options', []))\n contents = self._render_template('tc_script.sh', context)\n self.config.setdefault('files', [])\n self._add_unique_file({\n \"path\": \"/tc_script.sh\",\n \"contents\": contents,\n \"mode\": \"755\"\n })", - "docstring": "generates tc_script.sh and adds it to included files" - }, - { - "code": "def getAvgBySweep(abf,feature,T0=None,T1=None):\n if T1 is None:\n T1=abf.sweepLength\n if T0 is None:\n T0=0\n data = [np.empty((0))]*abf.sweeps\n for AP in cm.dictFlat(cm.matrixToDicts(abf.APs)):\n if T01 and np.any(data[sweep]):\n data[sweep]=np.nanmean(data[sweep])\n elif len(data[sweep])==1:\n data[sweep]=data[sweep][0]\n else:\n data[sweep]=np.nan\n return data", - "docstring": "return average of a feature divided by sweep." - }, - { - "code": "def tsuite_exit(trun, tsuite):\n if trun[\"conf\"][\"VERBOSE\"]:\n cij.emph(\"rnr:tsuite:exit\")\n rcode = 0\n for hook in reversed(tsuite[\"hooks\"][\"exit\"]):\n rcode = script_run(trun, hook)\n if rcode:\n break\n if trun[\"conf\"][\"VERBOSE\"]:\n cij.emph(\"rnr:tsuite:exit { rcode: %r } \" % rcode, rcode)\n return rcode", - "docstring": "Triggers when exiting the given testsuite" - }, - { - "code": "def toJSON(self):\n return {\"id\": self.id,\n \"compile\": self.compile,\n \"position\": self.position,\n \"version\": self.version}", - "docstring": "Get a json dict of the attributes of this object." - }, - { - "code": "def maybe_download_and_extract():\n dest_directory = '.'\n filename = DATA_URL.split('/')[-1]\n filepath = os.path.join(dest_directory, filename)\n if not os.path.exists(filepath):\n def _progress(count, block_size, total_size):\n sys.stdout.write('\\r>> Downloading %s %.1f%%' % (filename,\n float(count * block_size) / float(total_size) * 100.0))\n sys.stdout.flush()\n filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)\n print()\n statinfo = os.stat(filepath)\n print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')\n extracted_dir_path = os.path.join(dest_directory, 'trees')\n if not os.path.exists(extracted_dir_path):\n zip_ref = zipfile.ZipFile(filepath, 'r')\n zip_ref.extractall(dest_directory)\n zip_ref.close()", - "docstring": "Download and extract processed data and embeddings." 
- }, - { - "code": "def cmd_up(opts):\n config = load_config(opts.config)\n b = get_blockade(config, opts)\n containers = b.create(verbose=opts.verbose, force=opts.force)\n print_containers(containers, opts.json)", - "docstring": "Start the containers and link them together" - }, - { - "code": "def count(self, *args, **kwargs):\n search = self.create_search(*args, **kwargs)\n try:\n return search.count()\n except NotFoundError:\n print_error(\"The index was not found, have you initialized the index?\")\n except (ConnectionError, TransportError):\n print_error(\"Cannot connect to elasticsearch\")", - "docstring": "Returns the number of results after filtering with the given arguments." - }, - { - "code": "def contained_in(filename, directory):\n filename = os.path.normcase(os.path.abspath(filename))\n directory = os.path.normcase(os.path.abspath(directory))\n return os.path.commonprefix([filename, directory]) == directory", - "docstring": "Test if a file is located within the given directory." - }, - { - "code": "def peripheral_didUpdateValueForCharacteristic_error_(self, peripheral, characteristic, error):\n logger.debug('peripheral_didUpdateValueForCharacteristic_error called')\n if error is not None:\n return\n device = device_list().get(peripheral)\n if device is not None:\n device._characteristic_changed(characteristic)", - "docstring": "Called when characteristic value was read or updated." - }, - { - "code": "def HardwareInput(uMsg: int, param: int = 0) -> INPUT:\n return _CreateInput(HARDWAREINPUT(uMsg, param & 0xFFFF, param >> 16 & 0xFFFF))", - "docstring": "Create Win32 struct `HARDWAREINPUT` for `SendInput`." - }, - { - "code": "def parsemail(raw_message):\n message = email.parser.Parser().parsestr(raw_message)\n detected = chardet.detect(bytearray(raw_message, \"utf-8\"))\n encoding = detected[\"encoding\"]\n print(\">>> encoding {}\".format(encoding))\n for part in message.walk():\n if part.get_content_maintype() == 'multipart':\n continue\n part.set_charset(encoding)\n addrs = email.utils.getaddresses(message.get_all(\"TO\", [])) + \\\n email.utils.getaddresses(message.get_all(\"CC\", [])) + \\\n email.utils.getaddresses(message.get_all(\"BCC\", []))\n recipients = [x[1] for x in addrs]\n message.__delitem__(\"bcc\")\n message.__setitem__('Date', email.utils.formatdate())\n sender = message[\"from\"]\n return (message, sender, recipients)", - "docstring": "Parse message headers, then remove BCC header." 
- }, - { - "code": "def excerpt(self):\n if \"content\" not in self._results_fields:\n return None\n match_phrases = [self._match_phrase]\n if six.PY2:\n separate_phrases = [\n phrase.decode('utf-8')\n for phrase in shlex.split(self._match_phrase.encode('utf-8'))\n ]\n else:\n separate_phrases = [\n phrase\n for phrase in shlex.split(self._match_phrase)\n ]\n if len(separate_phrases) > 1:\n match_phrases.extend(separate_phrases)\n else:\n match_phrases = separate_phrases\n matches = SearchResultProcessor.find_matches(\n SearchResultProcessor.strings_in_dictionary(self._results_fields[\"content\"]),\n match_phrases,\n DESIRED_EXCERPT_LENGTH\n )\n excerpt_text = ELLIPSIS.join(matches)\n for match_word in match_phrases:\n excerpt_text = SearchResultProcessor.decorate_matches(excerpt_text, match_word)\n return excerpt_text", - "docstring": "Property to display a useful excerpt representing the matches within the results" - }, - { - "code": "def param_particle(self, ind):\n ind = self._vps(listify(ind))\n return [self._i2p(i, j) for i in ind for j in ['z', 'y', 'x', 'a']]", - "docstring": "Get position and radius of one or more particles" - }, - { - "code": "def lookup(username, reponame):\n mgr = plugins_get_mgr()\n repomgr = mgr.get(what='repomanager', name='git')\n repo = repomgr.lookup(username=username,\n reponame=reponame)\n return repo", - "docstring": "Lookup a repo based on username reponame" - }, - { - "code": "def filename(file_name, start_on=None, ignore=(), use_short=True, **queries):\n with open(file_name) as template_file:\n return file(template_file, start_on=start_on, ignore=ignore, use_short=use_short, **queries)", - "docstring": "Returns a blox template from a valid file path" - }, - { - "code": "def scanner(self, j, word):\n \"For each edge expecting a word of this category here, extend the edge.\"\n for (i, j, A, alpha, Bb) in self.chart[j]:\n if Bb and self.grammar.isa(word, Bb[0]):\n self.add_edge([i, j+1, A, alpha + [(Bb[0], word)], Bb[1:]])", - "docstring": "For each edge expecting a word of this category here, extend the edge." 
- }, - { - "code": "def _count_PIS(seqsamp, N):\n counts = [Counter(col) for col in seqsamp.T if not (\"-\" in col or \"N\" in col)]\n pis = [i.most_common(2)[1][1] > 1 for i in counts if len(i.most_common(2))>1]\n if sum(pis) >= N:\n return sum(pis)\n else:\n return 0", - "docstring": "filters for loci with >= N PIS" - }, - { - "code": "async def throw(response, loads=None, encoding=None, **kwargs):\n if loads is None:\n loads = data_processing.loads\n data = await data_processing.read(response, loads=loads,\n encoding=encoding)\n error = get_error(data)\n if error is not None:\n exception = errors[error['code']]\n raise exception(response=response, error=error, data=data, **kwargs)\n if response.status in statuses:\n exception = statuses[response.status]\n raise exception(response=response, data=data, **kwargs)\n raise PeonyException(response=response, data=data, **kwargs)", - "docstring": "Get the response data if possible and raise an exception" - }, - { - "code": "def fetch_organization_courses(organization):\n organization_obj = serializers.deserialize_organization(organization)\n queryset = internal.OrganizationCourse.objects.filter(\n organization=organization_obj,\n active=True\n ).select_related('organization')\n return [serializers.serialize_organization_with_course(organization) for organization in queryset]", - "docstring": "Retrieves the set of courses currently linked to the specified organization" - }, - { - "code": "def p_file_cr_text(self, f_term, predicate):\n try:\n for _, _, cr_text in self.graph.triples((f_term, predicate, None)):\n self.builder.set_file_copyright(self.doc, six.text_type(cr_text))\n except CardinalityError:\n self.more_than_one_error('file copyright text')", - "docstring": "Sets file copyright text." - }, - { - "code": "def create_application_version(self, version_label, key):\n out(\"Creating application version \" + str(version_label) + \" for \" + str(key))\n self.ebs.create_application_version(self.app_name, version_label,\n s3_bucket=self.aws.bucket, s3_key=self.aws.bucket_path+key)", - "docstring": "Creates an application version" - }, - { - "code": "def remove_organization_course(organization, course_key):\n _validate_organization_data(organization)\n _validate_course_key(course_key)\n return data.delete_organization_course(course_key=course_key, organization=organization)", - "docstring": "Removes the specfied course from the specified organization" - }, - { - "code": "def _getStartRow(self, bookmark):\n bookMarkDict = json.loads(bookmark)\n realpath = os.path.realpath(self._filename)\n bookMarkFile = bookMarkDict.get('filepath', None)\n if bookMarkFile != realpath:\n print (\"Ignoring bookmark due to mismatch between File's \"\n \"filename realpath vs. 
bookmark; realpath: %r; bookmark: %r\") % (\n realpath, bookMarkDict)\n return 0\n else:\n return bookMarkDict['currentRow']", - "docstring": "Extracts start row from the bookmark information" - }, - { - "code": "def import_parms(self, args):\n for key, val in args.items():\n self.set_parm(key, val)", - "docstring": "Import external dict to internal dict" - }, - { - "code": "def log_operation(entities, operation_name, params=None):\n if isinstance(entities, (list, tuple)):\n entities = list(entities)\n else:\n entities = [entities]\n p = {'name': operation_name, 'on': entities}\n if params:\n p['params'] = params\n _log(TYPE_CODES.OPERATION, p)", - "docstring": "Logs an operation done on an entity, possibly with other arguments" - }, - { - "code": "def split_list(l,N):\n npmode = isinstance(l,np.ndarray)\n if npmode:\n l=list(l)\n g=np.concatenate((np.array([0]),np.cumsum(split_integer(len(l),length=N))))\n s=[l[g[i]:g[i+1]] for i in range(N)]\n if npmode:\n s=[np.array(sl) for sl in s]\n return s", - "docstring": "Subdivide list into N lists" - }, - { - "code": "def open_s3(bucket):\n conn = boto.connect_s3(options.paved.s3.access_id, options.paved.s3.secret)\n try:\n bucket = conn.get_bucket(bucket)\n except boto.exception.S3ResponseError:\n bucket = conn.create_bucket(bucket)\n return bucket", - "docstring": "Opens connection to S3 returning bucket and key" - }, - { - "code": "def _make_future_features(node):\n assert isinstance(node, ast.ImportFrom)\n assert node.module == '__future__'\n features = FutureFeatures()\n for alias in node.names:\n name = alias.name\n if name in _FUTURE_FEATURES:\n if name not in _IMPLEMENTED_FUTURE_FEATURES:\n msg = 'future feature {} not yet implemented by grumpy'.format(name)\n raise util.ParseError(node, msg)\n setattr(features, name, True)\n elif name == 'braces':\n raise util.ParseError(node, 'not a chance')\n elif name not in _REDUNDANT_FUTURE_FEATURES:\n msg = 'future feature {} is not defined'.format(name)\n raise util.ParseError(node, msg)\n return features", - "docstring": "Processes a future import statement, returning set of flags it defines." - }, - { - "code": "def store(self, key: object, value: object):\r\n self._user_data.update({key: value})", - "docstring": "Stores custom user data." - }, - { - "code": "def getEncoding(self, n):\n assert (all(field.numEncodings>n for field in self.fields))\n encoding = np.concatenate([field.encodings[n] for field in self.fields])\n return encoding", - "docstring": "Returns the nth encoding" - }, - { - "code": "def register_operators(*operators):\n def validate(operator):\n if isoperator(operator):\n return True\n raise NotImplementedError('invalid operator: {}'.format(operator))\n def register(operator):\n for name in operator.operators:\n if name in Engine.operators:\n raise ValueError('operator name \"{}\" from {} is already '\n 'in use by other operator'.format(\n name,\n operator.__name__\n ))\n Engine.operators[name] = operator\n [register(operator) for operator in operators if validate(operator)]", - "docstring": "Registers one or multiple operators in the test engine." 
- }, - { - "code": "def inserted_indels(indels, ocatg):\n newcatg = np.zeros(ocatg.shape, dtype=np.uint32)\n for iloc in xrange(ocatg.shape[0]):\n indidx = np.where(indels[iloc, :])[0]\n if np.any(indidx):\n allrows = np.arange(ocatg.shape[1])\n mask = np.ones(allrows.shape[0], dtype=np.bool_)\n for idx in indidx:\n mask[idx] = False\n not_idx = allrows[mask == 1]\n newcatg[iloc][not_idx] = ocatg[iloc, :not_idx.shape[0]]\n else:\n newcatg[iloc] = ocatg[iloc]\n return newcatg", - "docstring": "inserts indels into the catg array" - }, - { - "code": "def update_dois(self):\n dois = record_get_field_instances(self.record, '024', ind1=\"7\")\n all_dois = {}\n for field in dois:\n subs = field_get_subfield_instances(field)\n subs_dict = dict(subs)\n if subs_dict.get('a'):\n if subs_dict['a'] in all_dois:\n record_delete_field(self.record, tag='024', ind1='7', field_position_global=field[4])\n continue\n all_dois[subs_dict['a']] = field", - "docstring": "Remove duplicate BibMatch DOIs." - }, - { - "code": "def watch(self):\n wm = pyinotify.WatchManager()\n self.notifier = pyinotify.Notifier(wm, default_proc_fun=self.callback)\n wm.add_watch(self.directory, pyinotify.ALL_EVENTS)\n try:\n self.notifier.loop()\n except (KeyboardInterrupt, AttributeError):\n print_notification(\"Stopping\")\n finally:\n self.notifier.stop()\n self.terminate_processes()", - "docstring": "Watches directory for changes" - }, - { - "code": "def connect(command, data=None, env=None, cwd=None):\n command_str = expand_args(command).pop()\n environ = dict(os.environ)\n environ.update(env or {})\n process = subprocess.Popen(command_str,\n universal_newlines=True,\n shell=False,\n env=environ,\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n bufsize=0,\n cwd=cwd,\n )\n return ConnectedCommand(process=process)", - "docstring": "Spawns a new process from the given command." - }, - { - "code": "def _ignore_path(cls, path, ignore_list=None, white_list=None):\n ignore_list = ignore_list or []\n white_list = white_list or []\n return (cls._matches_patterns(path, ignore_list) and\n not cls._matches_patterns(path, white_list))", - "docstring": "Returns a whether a path should be ignored or not." - }, - { - "code": "def _allocateSpatialFDR(self, rfInput):\n if self._sfdr:\n return\n autoArgs = dict((name, getattr(self, name))\n for name in self._spatialArgNames)\n if ( (self.SpatialClass == CPPSpatialPooler) or\n (self.SpatialClass == PYSpatialPooler) ):\n autoArgs['columnDimensions'] = [self.columnCount]\n autoArgs['inputDimensions'] = [self.inputWidth]\n autoArgs['potentialRadius'] = self.inputWidth\n self._sfdr = self.SpatialClass(\n **autoArgs\n )", - "docstring": "Allocate the spatial pooler instance." - }, - { - "code": "def parse_10qk(self, response):\n loader = ReportItemLoader(response=response)\n item = loader.load_item()\n if 'doc_type' in item:\n doc_type = item['doc_type']\n if doc_type in ('10-Q', '10-K'):\n return item\n return None", - "docstring": "Parse 10-Q or 10-K XML report." - }, - { - "code": "def validate(self):\n warnings.warn(\n 'Property \"package.validate\" is deprecated.',\n UserWarning)\n descriptor = self.to_dict()\n self.profile.validate(descriptor)", - "docstring": "Validate this Data Package." - }, - { - "code": "def KeyboardInput(wVk: int, wScan: int, dwFlags: int = KeyboardEventFlag.KeyDown, time_: int = 0) -> INPUT:\n return _CreateInput(KEYBDINPUT(wVk, wScan, dwFlags, time_, None))", - "docstring": "Create Win32 struct `KEYBDINPUT` for `SendInput`." 
- }, - { - "code": "def _uptime_minix():\n try:\n f = open('/proc/uptime', 'r')\n up = float(f.read())\n f.close()\n return up\n except (IOError, ValueError):\n return None", - "docstring": "Returns uptime in seconds or None, on MINIX." - }, - { - "code": "def reset_annotations(self):\n self.annotation_date_set = False\n self.annotation_comment_set = False\n self.annotation_type_set = False\n self.annotation_spdx_id_set = False", - "docstring": "Resets the builder's state to allow building new annotations." - }, - { - "code": "def refresh(self):\n asset = self.blockchain.rpc.get_asset(self.identifier)\n if not asset:\n raise AssetDoesNotExistsException(self.identifier)\n super(Asset, self).__init__(asset, blockchain_instance=self.blockchain)\n if self.full:\n if \"bitasset_data_id\" in asset:\n self[\"bitasset_data\"] = self.blockchain.rpc.get_object(\n asset[\"bitasset_data_id\"]\n )\n self[\"dynamic_asset_data\"] = self.blockchain.rpc.get_object(\n asset[\"dynamic_asset_data_id\"]\n )", - "docstring": "Refresh the data from the API server" - }, - { - "code": "def close(self):\n self._logger.info(\"Closing\")\n if self._pool is not None:\n self._pool.close()\n self._pool = None\n else:\n self._logger.warning(\n \"close() called, but connection policy was alredy closed\")\n return", - "docstring": "Close the policy instance and its database connection pool." - }, - { - "code": "def write(self, data):\n args = parse_qs(self.handler.environ.get(\"QUERY_STRING\"))\n if \"i\" in args:\n i = args[\"i\"]\n else:\n i = \"0\"\n super(JSONPolling, self).write(\"io.j[%s]('%s');\" % (i, data))", - "docstring": "Just quote out stuff before sending it out" - }, - { - "code": "def run(self, ctx):\n if ctx.reverse:\n self.engine.reverse()\n if self.engine.empty:\n raise AssertionError('grappa: no assertions to run')\n try:\n return self.run_assertions(ctx)\n except Exception as _err:\n if getattr(_err, '__legit__', False):\n raise _err\n return self.render_error(ctx, _err)", - "docstring": "Runs the current phase." - }, - { - "code": "def parse_unknown_args(args):\n retval = {}\n preceded_by_key = False\n for arg in args:\n if arg.startswith('--'):\n if '=' in arg:\n key = arg.split('=')[0][2:]\n value = arg.split('=')[1]\n retval[key] = value\n else:\n key = arg[2:]\n preceded_by_key = True\n elif preceded_by_key:\n retval[key] = arg\n preceded_by_key = False\n return retval", - "docstring": "Parse arguments not consumed by arg parser into a dicitonary" - }, - { - "code": "def strip_comment_line_with_symbol(line, start):\n parts = line.split(start)\n counts = [len(findall(r'(?:^|[^\"\\\\]|(?:\\\\\\\\|\\\\\")+)(\")', part))\n for part in parts]\n total = 0\n for nr, count in enumerate(counts):\n total += count\n if total % 2 == 0:\n return start.join(parts[:nr + 1]).rstrip()\n else:\n return line.rstrip()", - "docstring": "Strip comments from line string." - }, - { - "code": "def fact(self, name):\n facts = self.facts(name=name)\n return next(fact for fact in facts)", - "docstring": "Get a single fact from this node." - }, - { - "code": "def connect(self):\n for tried_connection_count in range(CFG_FTP_CONNECTION_ATTEMPTS):\n try:\n self.ftp = FtpHandler(self.config.OXFORD.URL,\n self.config.OXFORD.LOGIN,\n self.config.OXFORD.PASSWORD)\n self.logger.debug((\"Successful connection to the \"\n \"Oxford University Press server\"))\n return\n except socket_timeout_exception as err:\n self.logger.error(('Failed to connect %d of %d times. 
'\n 'Will sleep for %d seconds and try again.')\n % (tried_connection_count+1,\n CFG_FTP_CONNECTION_ATTEMPTS,\n CFG_FTP_TIMEOUT_SLEEP_DURATION))\n time.sleep(CFG_FTP_TIMEOUT_SLEEP_DURATION)\n except Exception as err:\n self.logger.error(('Failed to connect to the Oxford '\n 'University Press server. %s') % (err,))\n break\n raise LoginException(err)", - "docstring": "Logs into the specified ftp server and returns connector." - }, - { - "code": "def _generate_index(self):\n self._dict = {v.id: k for k, v in enumerate(self)}", - "docstring": "rebuild the _dict index" - }, - { - "code": "def _extract(data, session=None):\r\n if isinstance(data, list):\r\n return [_extract(d, session) for d in data]\r\n if not isinstance(data, np.ndarray):\r\n return data\r\n if isinstance(data, MatlabObject):\r\n cls = session._get_user_class(data.classname)\r\n return cls.from_value(data)\r\n if data.dtype.names:\r\n if data.size == 1:\r\n return _create_struct(data, session)\r\n return StructArray(data, session)\r\n if data.dtype.kind == 'O':\r\n return Cell(data, session)\r\n if data.size == 1:\r\n return data.item()\r\n if data.size == 0:\r\n if data.dtype.kind in 'US':\r\n return ''\r\n return []\r\n return data", - "docstring": "Convert the Octave values to values suitable for Python." - }, - { - "code": "def _get_registered_executable(exe_name):\n registered = None\n if sys.platform.startswith('win'):\n if os.path.splitext(exe_name)[1].lower() != '.exe':\n exe_name += '.exe'\n import _winreg\n try:\n key = \"SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\App Paths\\\\\" + exe_name\n value = _winreg.QueryValue(_winreg.HKEY_LOCAL_MACHINE, key)\n registered = (value, \"from HKLM\\\\\"+key)\n except _winreg.error:\n pass\n if registered and not os.path.exists(registered[0]):\n registered = None\n return registered", - "docstring": "Windows allow application paths to be registered in the registry." - }, - { - "code": "def new(self, text, shorten=None, now=None, top=None, media=None, when=None):\n url = PATHS['CREATE']\n post_data = \"text=%s&\" % text\n post_data += \"profile_ids[]=%s&\" % self.profile_id\n if shorten:\n post_data += \"shorten=%s&\" % shorten\n if now:\n post_data += \"now=%s&\" % now\n if top:\n post_data += \"top=%s&\" % top\n if when:\n post_data += \"scheduled_at=%s&\" % str(when)\n if media:\n media_format = \"media[%s]=%s&\"\n for media_type, media_item in media.iteritems():\n post_data += media_format % (media_type, media_item)\n response = self.api.post(url=url, data=post_data)\n new_update = Update(api=self.api, raw_response=response['updates'][0])\n self.append(new_update)\n return new_update", - "docstring": "Create one or more new status updates." - }, - { - "code": "def merge_configs(main, tweaks):\n for section in tweaks.sections():\n for option in tweaks.options(section):\n value = tweaks.get(section, option)\n if option.endswith(\"+\"):\n option = option[:-1]\n value = main.get(section, option) + value\n main.set(section, option, value)", - "docstring": "Merge tweaks into a main config file." 
- }, - { - "code": "def search_projects(session,\n query,\n search_filter=None,\n project_details=None,\n user_details=None,\n limit=10,\n offset=0,\n active_only=None):\n search_data = {\n 'query': query,\n 'limit': limit,\n 'offset': offset,\n }\n if search_filter:\n search_data.update(search_filter)\n if project_details:\n search_data.update(project_details)\n if user_details:\n search_data.update(user_details)\n endpoint = 'projects/{}'.format('active' if active_only else 'all')\n response = make_get_request(session, endpoint, params_data=search_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['result']\n else:\n raise ProjectsNotFoundException(\n message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Search for all projects" - }, - { - "code": "def acl_show(self, msg, args):\n name = args[0] if len(args) > 0 else None\n if name is None:\n return \"%s: The following ACLs are defined: %s\" % (msg.user, ', '.join(self._acl.keys()))\n if name not in self._acl:\n return \"Sorry, couldn't find an acl named '%s'\" % name\n return '\\n'.join([\n \"%s: ACL '%s' is defined as follows:\" % (msg.user, name),\n \"allow: %s\" % ', '.join(self._acl[name]['allow']),\n \"deny: %s\" % ', '.join(self._acl[name]['deny'])\n ])", - "docstring": "Show current allow and deny blocks for the given acl." - }, - { - "code": "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument('protofilepath')\n args = parser.parse_args()\n out_file = compile_protofile(args.protofilepath)\n with open(out_file, 'rb') as proto_file:\n file_descriptor_set = descriptor_pb2.FileDescriptorSet.FromString(\n proto_file.read()\n )\n for file_descriptor in file_descriptor_set.file:\n locations = {}\n for location in file_descriptor.source_code_info.location:\n locations[tuple(location.path)] = location\n print(make_comment('This file was automatically generated from {} and '\n 'should not be edited directly.'\n .format(args.protofilepath)))\n for index, message_desc in enumerate(file_descriptor.message_type):\n generate_message_doc(message_desc, locations, (4, index))\n for index, enum_desc in enumerate(file_descriptor.enum_type):\n generate_enum_doc(enum_desc, locations, (5, index))", - "docstring": "Parse arguments and print generated documentation to stdout." - }, - { - "code": "async def connect(self, channel_id: int):\r\n ws = self._lavalink.bot._connection._get_websocket(int(self.guild_id))\r\n await ws.voice_state(self.guild_id, str(channel_id))", - "docstring": "Connects to a voice channel." 
- }, - { - "code": "def _validate_program(self):\n program = self.cleaned_data.get(self.Fields.PROGRAM)\n if not program:\n return\n course_runs = get_course_runs_from_program(program)\n try:\n client = CourseCatalogApiClient(self._user, self._enterprise_customer.site)\n available_modes = client.get_common_course_modes(course_runs)\n course_mode = self.cleaned_data.get(self.Fields.COURSE_MODE)\n except (HttpClientError, HttpServerError):\n raise ValidationError(\n ValidationMessages.FAILED_TO_OBTAIN_COURSE_MODES.format(program_title=program.get(\"title\"))\n )\n if not course_mode:\n raise ValidationError(ValidationMessages.COURSE_WITHOUT_COURSE_MODE)\n if course_mode not in available_modes:\n raise ValidationError(ValidationMessages.COURSE_MODE_NOT_AVAILABLE.format(\n mode=course_mode, program_title=program.get(\"title\"), modes=\", \".join(available_modes)\n ))", - "docstring": "Verify that selected mode is available for program and all courses in the program" - }, - { - "code": "def mfbe(a, b):\n return 2 * bias(a, b) / (a.mean() + b.mean())", - "docstring": "Returns the mean fractionalized bias error" - }, - { - "code": "def load(self, id):\n self.clear()\n self.add_node(id, root=True)\n for w, id2 in self.get_links(id):\n self.add_edge(id, id2, weight=w)\n if len(self) > self.max: \n break\n for w, id2, links in self.get_cluster(id):\n for id3 in links:\n self.add_edge(id3, id2, weight=w)\n self.add_edge(id, id3, weight=w)\n if len(self) > self.max: \n break \n if self.event.clicked: \n g.add_node(self.event.clicked)", - "docstring": "Rebuilds the graph around the given node id." - }, - { - "code": "def score(self):\n \"The total score for the words found, according to the rules.\"\n return sum([self.scores[len(w)] for w in self.words()])", - "docstring": "The total score for the words found, according to the rules." - }, - { - "code": "def handle_extends(self, text):\n match = self.re_extends.match(text)\n if match:\n extra_text = self.re_extends.sub('', text, count=1)\n blocks = self.get_blocks(extra_text)\n path = os.path.join(self.base_dir, match.group('path'))\n with open(path, encoding='utf-8') as fp:\n return self.replace_blocks_in_extends(fp.read(), blocks)\n else:\n return None", - "docstring": "replace all blocks in extends with current blocks" - }, - { - "code": "def _apply_unique_checks(self, i, r, unique_sets,\n summarize=False,\n context=None):\n for key, code, message in self._unique_checks:\n value = None\n values = unique_sets[key]\n if isinstance(key, basestring):\n fi = self._field_names.index(key)\n if fi >= len(r):\n continue\n value = r[fi]\n else:\n value = []\n for f in key:\n fi = self._field_names.index(f)\n if fi >= len(r):\n break\n value.append(r[fi])\n value = tuple(value)\n if value in values:\n p = {'code': code}\n if not summarize:\n p['message'] = message\n p['row'] = i + 1\n p['record'] = r\n p['key'] = key\n p['value'] = value\n if context is not None: p['context'] = context\n yield p\n values.add(value)", - "docstring": "Apply unique checks on `r`." 
- }, - { - "code": "def create_reg_message(self):\n msg = {'parsl_v': PARSL_VERSION,\n 'python_v': \"{}.{}.{}\".format(sys.version_info.major,\n sys.version_info.minor,\n sys.version_info.micro),\n 'os': platform.system(),\n 'hname': platform.node(),\n 'dir': os.getcwd(),\n }\n b_msg = json.dumps(msg).encode('utf-8')\n return b_msg", - "docstring": "Creates a registration message to identify the worker to the interchange" - }, - { - "code": "def center_of_mass(bodies):\n x = np.zeros(3.)\n t = 0.\n for b in bodies:\n m = b.mass\n x += b.body_to_world(m.c) * m.mass\n t += m.mass\n return x / t", - "docstring": "Given a set of bodies, compute their center of mass in world coordinates." - }, - { - "code": "def add_job_to_context(context, job_id):\n db_job = db_api.async_transaction_find(\n context, id=job_id, scope=db_api.ONE)\n if not db_job:\n return\n context.async_job = {\"job\": v._make_job_dict(db_job)}", - "docstring": "Adds job to neutron context for use later." - }, - { - "code": "def version(self):\n r = self.local_renderer\n with self.settings(hide('running', 'warnings'), warn_only=True):\n res = r.local('vagrant --version', capture=True)\n if res.failed:\n return None\n line = res.splitlines()[-1]\n version = re.match(r'Vagrant (?:v(?:ersion )?)?(.*)', line).group(1)\n return tuple(_to_int(part) for part in version.split('.'))", - "docstring": "Get the Vagrant version." - }, - { - "code": "def make_object(*args, typename=None, python_path=None, datatype=None, **kwds):\n datatype = datatype or import_symbol(typename, python_path)\n field_types = getattr(datatype, 'FIELD_TYPES', fields.FIELD_TYPES)\n return datatype(*args, **fields.component(kwds, field_types))", - "docstring": "Make an object from a symbol." - }, - { - "code": "def _load_expansion(self, key, root, pattern):\n path_pattern = os.path.join(root, pattern)\n expanded_paths = self._expand_pattern(path_pattern)\n specs=[]\n for (path, tags) in expanded_paths:\n filelist = [os.path.join(path,f) for f in os.listdir(path)] if os.path.isdir(path) else [path]\n for filepath in filelist:\n specs.append(dict(tags,**{key:os.path.abspath(filepath)}))\n return sorted(specs, key=lambda s: s[key])", - "docstring": "Loads the files that match the given pattern." 
- }, - { - "code": "def delete(self):\n\t\tif self.filters or self.notFilters:\n\t\t\treturn self.mdl.deleter.deleteMultiple(self.allOnlyIndexedFields())\n\t\treturn self.mdl.deleter.destroyModel()", - "docstring": "delete - Deletes all entries matching the filter criteria" - }, - { - "code": "def create(self):\n self.create_virtualenv()\n self.create_project()\n self.create_uwsgi_script()\n self.create_nginx_config()\n self.create_manage_scripts()\n logging.info('** Make sure to set proper permissions for the webserver user account on the var and log directories in the project root')", - "docstring": "Creates the full project" - }, - { - "code": "def as_dict(self):\n entry_dict = {}\n entry_dict['UUID'] = self.uuid\n entry_dict['Creation Date'] = self.time\n entry_dict['Time Zone'] = self.tz\n if self.tags:\n entry_dict['Tags'] = self.tags\n entry_dict['Entry Text'] = self.text\n entry_dict['Starred'] = self.starred\n entry_dict['Location'] = self.location\n return entry_dict", - "docstring": "Return a dict that represents the DayOneEntry" - }, - { - "code": "def jsonLogic(tests, data=None):\n if tests is None or not isinstance(tests, dict):\n return tests\n data = data or {}\n operator = list(tests.keys())[0]\n values = tests[operator]\n if not isinstance(values, list) and not isinstance(values, tuple):\n values = [values]\n values = [jsonLogic(val, data) for val in values]\n if operator == 'var':\n return get_var(data, *values)\n if operator == 'missing':\n return missing(data, *values)\n if operator == 'missing_some':\n return missing_some(data, *values)\n if operator not in operations:\n raise ValueError(\"Unrecognized operation %s\" % operator)\n return operations[operator](*values)", - "docstring": "Executes the json-logic with given data." - }, - { - "code": "def __advancePhase(self):\n self.__currentPhase = self.__phaseCycler.next()\n self.__currentPhase.enterPhase()\n return", - "docstring": "Advance to the next iteration cycle phase" - }, - { - "code": "def switch(template, version):\n temple.update.update(new_template=template, new_version=version)", - "docstring": "Switch a project's template to a different template." - }, - { - "code": "def h(self, node):\n \"h function is straight-line distance from a node's state to goal.\"\n locs = getattr(self.graph, 'locations', None)\n if locs:\n return int(distance(locs[node.state], locs[self.goal]))\n else:\n return infinity", - "docstring": "h function is straight-line distance from a node's state to goal." 
- }, - { - "code": "def environment_exists(self, env_name):\n response = self.ebs.describe_environments(application_name=self.app_name, environment_names=[env_name],\n include_deleted=False)\n return len(response['DescribeEnvironmentsResponse']['DescribeEnvironmentsResult']['Environments']) > 0 \\\n and response['DescribeEnvironmentsResponse']['DescribeEnvironmentsResult']['Environments'][0][\n 'Status'] != 'Terminated'", - "docstring": "Returns whether or not the given environment exists" - }, - { - "code": "def _parse(self, filename):\n self.names = {}\n with codecs.open(filename, encoding=\"iso8859-1\") as f:\n for line in f:\n if any(map(lambda c: 128 < ord(c) < 160, line)):\n line = line.encode(\"iso8859-1\").decode(\"windows-1252\")\n self._eat_name_line(line.strip())", - "docstring": "Opens data file and for each line, calls _eat_name_line" - }, - { - "code": "def build_reaction_string(self, use_metabolite_names=False):\n def format(number):\n return \"\" if number == 1 else str(number).rstrip(\".\") + \" \"\n id_type = 'id'\n if use_metabolite_names:\n id_type = 'name'\n reactant_bits = []\n product_bits = []\n for met in sorted(self._metabolites, key=attrgetter(\"id\")):\n coefficient = self._metabolites[met]\n name = str(getattr(met, id_type))\n if coefficient >= 0:\n product_bits.append(format(coefficient) + name)\n else:\n reactant_bits.append(format(abs(coefficient)) + name)\n reaction_string = ' + '.join(reactant_bits)\n if not self.reversibility:\n if self.lower_bound < 0 and self.upper_bound <= 0:\n reaction_string += ' <-- '\n else:\n reaction_string += ' --> '\n else:\n reaction_string += ' <=> '\n reaction_string += ' + '.join(product_bits)\n return reaction_string", - "docstring": "Generate a human readable reaction string" - }, - { - "code": "def create_pipe_workers(configfile, directory):\n type_map = {'service': ServiceSearch,\n 'host': HostSearch, 'range': RangeSearch,\n 'user': UserSearch}\n config = configparser.ConfigParser()\n config.read(configfile)\n if not len(config.sections()):\n print_error(\"No named pipes configured\")\n return\n print_notification(\"Starting {} pipes in directory {}\".format(\n len(config.sections()), directory))\n workers = []\n for name in config.sections():\n section = config[name]\n query = create_query(section)\n object_type = type_map[section['type']]\n args = (name, os.path.join(directory, name), object_type, query,\n section['format'], bool(section.get('unique', 0)))\n workers.append(multiprocessing.Process(target=pipe_worker, args=args))\n return workers", - "docstring": "Creates the workers based on the given configfile to provide named pipes in the directory." - }, - { - "code": "def item(proto_dataset_uri, input_file, relpath_in_dataset):\n proto_dataset = dtoolcore.ProtoDataSet.from_uri(\n proto_dataset_uri,\n config_path=CONFIG_PATH)\n if relpath_in_dataset == \"\":\n relpath_in_dataset = os.path.basename(input_file)\n proto_dataset.put_item(input_file, relpath_in_dataset)", - "docstring": "Add a file to the proto dataset." - }, - { - "code": "def size(self):\n for fd in range(3):\n cr = self._ioctl_GWINSZ(fd)\n if cr:\n break\n if not cr:\n try:\n fd = os.open(os.ctermid(), os.O_RDONLY)\n cr = self._ioctl_GWINSZ(fd)\n os.close(fd)\n except Exception:\n pass\n if not cr:\n env = os.environ\n cr = (env.get('LINES', 25), env.get('COLUMNS', 80))\n return int(cr[1]), int(cr[0])", - "docstring": "Get the current terminal size." 
- }, - { - "code": "def list_granules(self, coverage, store, workspace=None, filter=None, limit=None, offset=None):\n params = dict()\n if filter is not None:\n params['filter'] = filter\n if limit is not None:\n params['limit'] = limit\n if offset is not None:\n params['offset'] = offset\n workspace_name = workspace\n if isinstance(store, basestring):\n store_name = store\n else:\n store_name = store.name\n workspace_name = store.workspace.name\n if workspace_name is None:\n raise ValueError(\"Must specify workspace\")\n url = build_url(\n self.service_url,\n [\n \"workspaces\",\n workspace_name,\n \"coveragestores\",\n store_name,\n \"coverages\",\n coverage,\n \"index/granules.json\"\n ],\n params\n )\n headers = {\n \"Content-type\": \"application/json\",\n \"Accept\": \"application/json\"\n }\n resp = self.http_request(url, headers=headers)\n if resp.status_code != 200:\n FailedRequestError('Failed to list granules in mosaic {} : {}, {}'.format(store, resp.status_code, resp.text))\n self._cache.clear()\n return resp.json()", - "docstring": "List granules of an imagemosaic" - }, - { - "code": "def autodiscover():\n from django.conf import settings\n for application in settings.INSTALLED_APPS:\n module = import_module(application)\n if module_has_submodule(module, 'emails'):\n emails = import_module('%s.emails' % application)\n try:\n import_module('%s.emails.previews' % application)\n except ImportError:\n if module_has_submodule(emails, 'previews'):\n raise", - "docstring": "Imports all available previews classes." - }, - { - "code": "def pitch_contour(annotation, **kwargs):\n ax = kwargs.pop('ax', None)\n ax = mir_eval.display.__get_axes(ax=ax)[0]\n times, values = annotation.to_interval_values()\n indices = np.unique([v['index'] for v in values])\n for idx in indices:\n rows = [i for (i, v) in enumerate(values) if v['index'] == idx]\n freqs = np.asarray([values[r]['frequency'] for r in rows])\n unvoiced = ~np.asarray([values[r]['voiced'] for r in rows])\n freqs[unvoiced] *= -1\n ax = mir_eval.display.pitch(times[rows, 0], freqs, unvoiced=True,\n ax=ax,\n **kwargs)\n return ax", - "docstring": "Plotting wrapper for pitch contours" - }, - { - "code": "def add_program_dir(self, directory):\n dirs = list(self.PROGRAM_DIRS)\n dirs.append(directory)\n self.PROGRAM_DIRS = dirs", - "docstring": "Hack in program directory" - }, - { - "code": "def _check_for_inception(self, root_dict):\n for key in root_dict:\n if isinstance(root_dict[key], dict):\n root_dict[key] = ResponseObject(root_dict[key])\n return root_dict", - "docstring": "Used to check if there is a dict in a dict" - }, - { - "code": "def read_temple_config():\n with open(temple.constants.TEMPLE_CONFIG_FILE) as temple_config_file:\n return yaml.load(temple_config_file, Loader=yaml.SafeLoader)", - "docstring": "Reads the temple YAML configuration file in the repository" - }, - { - "code": "def _generateChildren(self):\n try:\n children = self.AXChildren\n except _a11y.Error:\n return\n if children:\n for child in children:\n yield child", - "docstring": "Generator which yields all AXChildren of the object." 
- }, - { - "code": "def getSDRforValue(self, i, j):\n assert len(self.fields)>i\n assert self.fields[i].numRecords>j\n encoding = self.fields[i].encodings[j]\n return encoding", - "docstring": "Returns the sdr for jth value at column i" - }, - { - "code": "def conference_deaf(self, call_params):\n path = '/' + self.api_version + '/ConferenceDeaf/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Conference Deaf helper" - }, - { - "code": "def _calc_loglikelihood(self, model=None, tile=None):\n if model is None:\n res = self.residuals\n else:\n res = model - self._data[tile.slicer]\n sig, isig = self.sigma, 1.0/self.sigma\n nlogs = -np.log(np.sqrt(2*np.pi)*sig)*res.size\n return -0.5*isig*isig*np.dot(res.flat, res.flat) + nlogs", - "docstring": "Allows for fast local updates of log-likelihood" - }, - { - "code": "def getch():\n try:\n termios.tcsetattr(_fd, termios.TCSANOW, _new_settings)\n ch = sys.stdin.read(1)\n finally:\n termios.tcsetattr(_fd, termios.TCSADRAIN, _old_settings)\n return ch", - "docstring": "get character. waiting for key" - }, - { - "code": "def log(self, string):\n self.wfile.write(json.dumps({'log': string}) + NEWLINE)", - "docstring": "Log an event on the CouchDB server." - }, - { - "code": "def unlock_wallet(self, *args, **kwargs):\n self.blockchain.wallet.unlock(*args, **kwargs)\n return self", - "docstring": "Unlock the library internal wallet" - }, - { - "code": "def usage_function(parser):\n parser.print_usage()\n print('')\n print('available functions:')\n for function in sorted(FUNCTION):\n doc = FUNCTION[function].__doc__.strip().splitlines()[0]\n print(' %-12s %s' % (function + ':', doc))\n return 0", - "docstring": "Show usage and available curve functions." - }, - { - "code": "async def _on_connect(self):\n self._user_list, self._conv_list = (\n await hangups.build_user_conversation_list(self._client)\n )\n self._conv_list.on_event.add_observer(self._on_event)\n conv_picker = ConversationPickerWidget(self._conv_list,\n self.on_select_conversation,\n self._keys)\n self._tabbed_window = TabbedWindowWidget(self._keys)\n self._tabbed_window.set_tab(conv_picker, switch=True,\n title='Conversations')\n self._urwid_loop.widget = self._tabbed_window", - "docstring": "Handle connecting for the first time." - }, - { - "code": "def _datetime_to_utc_int(date):\n if date is None:\n return None\n epoch = dsub_util.replace_timezone(datetime.utcfromtimestamp(0), pytz.utc)\n return (date - epoch).total_seconds()", - "docstring": "Convert the integer UTC time value into a local datetime." - }, - { - "code": "def uplink_receive(self, stanza):\n with self.lock:\n if self.stanza_route:\n self.stanza_route.uplink_receive(stanza)\n else:\n logger.debug(u\"Stanza dropped (no route): {0!r}\".format(stanza))", - "docstring": "Handle stanza received from the stream." - }, - { - "code": "def check_unused_args(self, used_args, args, kwargs):\n for k, v in kwargs.items():\n if k in used_args:\n self._used_kwargs.update({k: v})\n else:\n self._unused_kwargs.update({k: v})", - "docstring": "Implement the check_unused_args in superclass." 
- }, - { - "code": "def missing_particle(separation=0.0, radius=RADIUS, SNR=20):\n s = init.create_two_particle_state(imsize=6*radius+4, axis='x', sigma=1.0/SNR,\n delta=separation, radius=radius, stateargs={'varyn': True}, psfargs={'error': 1e-6})\n s.obj.typ[1] = 0.\n s.reset()\n return s, s.obj.pos.copy()", - "docstring": "create a two particle state and compare it to featuring using a single particle guess" - }, - { - "code": "def _bin_to_dec(ip, check=True):\n if check and not is_bin(ip):\n raise ValueError('_bin_to_dec: invalid IP: \"%s\"' % ip)\n if isinstance(ip, int):\n ip = str(ip)\n return int(str(ip), 2)", - "docstring": "Binary to decimal conversion." - }, - { - "code": "def fill(self, color, start=0, end=-1):\n start = max(start, 0)\n if end < 0 or end >= self.numLEDs:\n end = self.numLEDs - 1\n for led in range(start, end + 1):\n self._set_base(led, color)", - "docstring": "Fill the entire strip with RGB color tuple" - }, - { - "code": "def _dump_to_file(self, file):\n xmltodict.unparse(self.object(), file, pretty=True)", - "docstring": "dump to the file" - }, - { - "code": "def eof(self):\n return (not self.is_alive()) and self._queue.empty() or self._fd.closed", - "docstring": "Check whether there is no more content to expect." - }, - { - "code": "def SetVerboseLevel(self, level):\n last_verbose_level = self.verbose_level\n self.verbose_level = level\n return last_verbose_level", - "docstring": "Sets the module's verbosity, and returns the previous setting." - }, - { - "code": "def gtk_mouse_button_down(self, widget, event):\n if self.menu_enabled and event.button == 3:\n menu = self.uimanager.get_widget('/Save as')\n menu.popup(None, None, None, None, event.button, event.time)\n else:\n super(ShoebotWindow, self).gtk_mouse_button_down(widget, event)", - "docstring": "Handle right mouse button clicks" - }, - { - "code": "def count_top_centrality(graph: BELGraph, number: Optional[int] = 30) -> Mapping[BaseEntity, int]:\n dd = nx.betweenness_centrality(graph)\n dc = Counter(dd)\n return dict(dc.most_common(number))", - "docstring": "Get top centrality dictionary." - }, - { - "code": "def size(self):\n try:\n return self._stat.st_size\n except:\n self._stat = self.stat()\n return self.size", - "docstring": "File size in bytes." - }, - { - "code": "def begin(self):\n self.connect(self.host, self.port)\n if self.user:\n self.starttls()\n self.login(self.user, self.password)", - "docstring": "connects and optionally authenticates a connection." - }, - { - "code": "def handle_authorized(self, event):\n request_software_version(self.client, self.target_jid,\n self.success, self.failure)", - "docstring": "Send the initial presence after log-in." 
- }, - { - "code": "def noformat(self):\n try:\n formats = {}\n for h in self.get_handlers():\n formats[h] = h.formatter\n self.set_formatter(formatter='quiet')\n yield\n except Exception as e:\n raise\n finally:\n for k,v in iteritems(formats):\n k.formatter = v", - "docstring": "Temporarily do not use any formatter so that text printed is raw" - }, - { - "code": "def unregister(self):\n for k in list(env.keys()):\n if k.startswith(self.env_prefix):\n del env[k]\n try:\n del all_satchels[self.name.upper()]\n except KeyError:\n pass\n try:\n del manifest_recorder[self.name]\n except KeyError:\n pass\n try:\n del manifest_deployers[self.name.upper()]\n except KeyError:\n pass\n try:\n del manifest_deployers_befores[self.name.upper()]\n except KeyError:\n pass\n try:\n del required_system_packages[self.name.upper()]\n except KeyError:\n pass", - "docstring": "Removes this satchel from global registeries." - }, - { - "code": "def transform(x):\n try:\n x = date2num(x)\n except AttributeError:\n x = [pd.Timestamp(item) for item in x]\n x = date2num(x)\n return x", - "docstring": "Transform from date to a numerical format" - }, - { - "code": "def add_key(self, key):\n if key not in self.value:\n self.value[key] = ReducedMetric(self.reducer)", - "docstring": "Adds a new key to this metric" - }, - { - "code": "def flush_buffer(self):\n self.code_builder.add_line('{0}.extend([{1}])',\n self.result_var, ','.join(self.buffered))\n self.buffered = []", - "docstring": "flush all buffered string into code" - }, - { - "code": "def exact_sqrt(n2):\n \"If n2 is a perfect square, return its square root, else raise error.\"\n n = int(math.sqrt(n2))\n assert n * n == n2\n return n", - "docstring": "If n2 is a perfect square, return its square root, else raise error." - }, - { - "code": "def columns(x, rho, proxop):\n xnext = np.zeros_like(x)\n for ix in range(x.shape[1]):\n xnext[:, ix] = proxop(x[:, ix], rho)\n return xnext", - "docstring": "Applies a proximal operator to the columns of a matrix" - }, - { - "code": "def dropbox_form(request):\n from briefkasten import generate_post_token\n token = generate_post_token(secret=request.registry.settings['post_secret'])\n return dict(\n action=request.route_url('dropbox_form_submit', token=token),\n fileupload_url=request.route_url('dropbox_fileupload', token=token),\n **defaults(request))", - "docstring": "generates a dropbox uid and renders the submission form with a signed version of that id" - }, - { - "code": "def transform_image(self, content_metadata_item):\n image_url = ''\n if content_metadata_item['content_type'] in ['course', 'program']:\n image_url = content_metadata_item.get('card_image_url')\n elif content_metadata_item['content_type'] == 'courserun':\n image_url = content_metadata_item.get('image_url')\n return image_url", - "docstring": "Return the image URI of the content item." 
- }, - { - "code": "def record_replace_field(rec, tag, new_field, field_position_global=None,\n field_position_local=None):\n if field_position_global is None and field_position_local is None:\n raise InvenioBibRecordFieldError(\n \"A field position is required to \"\n \"complete this operation.\")\n elif field_position_global is not None and \\\n field_position_local is not None:\n raise InvenioBibRecordFieldError(\n \"Only one field position is required \"\n \"to complete this operation.\")\n elif field_position_global:\n if tag not in rec:\n raise InvenioBibRecordFieldError(\"No tag '%s' in record.\" % tag)\n replaced = False\n for position, field in enumerate(rec[tag]):\n if field[4] == field_position_global:\n rec[tag][position] = new_field\n replaced = True\n if not replaced:\n raise InvenioBibRecordFieldError(\n \"No field has the tag '%s' and \"\n \"the global field position '%d'.\" %\n (tag, field_position_global))\n else:\n try:\n rec[tag][field_position_local] = new_field\n except KeyError:\n raise InvenioBibRecordFieldError(\"No tag '%s' in record.\" % tag)\n except IndexError:\n raise InvenioBibRecordFieldError(\n \"No field has the tag '%s' and \"\n \"the local field position '%d'.\" % (tag, field_position_local))", - "docstring": "Replace a field with a new field." - }, - { - "code": "def limit_sentences (path, word_limit=100):\n word_count = 0\n if isinstance(path, str):\n path = json_iter(path)\n for meta in path:\n if not isinstance(meta, SummarySent):\n p = SummarySent(**meta)\n else:\n p = meta\n sent_text = p.text.strip().split(\" \")\n sent_len = len(sent_text)\n if (word_count + sent_len) > word_limit:\n break\n else:\n word_count += sent_len\n yield sent_text, p.idx", - "docstring": "iterator for the most significant sentences, up to a specified limit" - }, - { - "code": "def headerHTML(self,fname=None):\n if fname is None:\n fname = self.fname.replace(\".abf\",\"_header.html\")\n html=\"\"\n html+=\"
<h2>abfinfo() for %s.abf</h2>\"%self.ID\n html+=self.abfinfo().replace(\"<\",\"&lt;\").replace(\">\",\"&gt;\").replace(\"\\n\",\"<br>\")\n html+=\"<h2>Header for %s.abf</h2>\"%self.ID\n html+=pprint.pformat(self.header, indent=1)\n html=html.replace(\"\\n\",'<br>').replace(\" \",\"&nbsp;\")\n html=html.replace(r\"\\x00\",\"\")\n html+=\"</body></html>
\"\n print(\"WRITING HEADER TO:\")\n print(fname)\n f=open(fname,'w')\n f.write(html)\n f.close()", - "docstring": "read the ABF header and save it HTML formatted." - }, - { - "code": "def _get_contour_values(min_val, max_val, base=0, interval=100):\n i = base\n out = []\n if min_val < base:\n while i >= min_val:\n i -= interval\n while i <= max_val:\n if i >= min_val:\n out.append(i)\n i += interval\n return out", - "docstring": "Return a list of values between min and max within an interval." - }, - { - "code": "def write_triples(filename, triples, delimiter=DEFAULT_DELIMITER, triple_order=\"hrt\"):\n with open(filename, 'w') as f:\n for t in triples:\n line = t.serialize(delimiter, triple_order)\n f.write(line + \"\\n\")", - "docstring": "write triples to file." - }, - { - "code": "def height(self):\n if len(self.coords) <= 1:\n return 0\n return np.max(self.yy) - np.min(self.yy)", - "docstring": "Get the height of a bounding box encapsulating the line." - }, - { - "code": "def validate_image_size(image):\n config = get_app_config()\n valid_max_image_size_in_bytes = config.valid_max_image_size * 1024\n if config and not image.size <= valid_max_image_size_in_bytes:\n raise ValidationError(\n _(\"The logo image file size must be less than or equal to %s KB.\") % config.valid_max_image_size)", - "docstring": "Validate that a particular image size." - }, - { - "code": "def run(cmd):\n cmd = [pipes.quote(c) for c in cmd]\n cmd = \" \".join(cmd)\n cmd += \"; exit 0\"\n try:\n output = subprocess.check_output(cmd,\n stderr=subprocess.STDOUT,\n shell=True)\n except subprocess.CalledProcessError as e:\n output = e.output\n output = output.decode('utf-8')\n output = output.strip()\n return output", - "docstring": "Run a shell command" - }, - { - "code": "def add_cms_link(self):\n intnote = record_get_field_values(self.record, '690',\n filter_subfield_code=\"a\",\n filter_subfield_value='INTNOTE')\n if intnote:\n val_088 = record_get_field_values(self.record,\n tag='088',\n filter_subfield_code=\"a\")\n for val in val_088:\n if 'CMS' in val:\n url = ('http://weblib.cern.ch/abstract?CERN-CMS' +\n val.split('CMS', 1)[-1])\n record_add_field(self.record,\n tag='856',\n ind1='4',\n subfields=[('u', url)])", - "docstring": "Special handling if record is a CMS NOTE." - }, - { - "code": "def merge_da(self):\n print(' - Merging D and A timestamps', flush=True)\n ts_d, ts_par_d = self.S.get_timestamps_part(self.name_timestamps_d)\n ts_a, ts_par_a = self.S.get_timestamps_part(self.name_timestamps_a)\n ts, a_ch, part = merge_da(ts_d, ts_par_d, ts_a, ts_par_a)\n assert a_ch.sum() == ts_a.shape[0]\n assert (~a_ch).sum() == ts_d.shape[0]\n assert a_ch.size == ts_a.shape[0] + ts_d.shape[0]\n self.ts, self.a_ch, self.part = ts, a_ch, part\n self.clk_p = ts_d.attrs['clk_p']", - "docstring": "Merge donor and acceptor timestamps, computes `ts`, `a_ch`, `part`." - }, - { - "code": "def unlocked(self):\n if self.password is not None:\n return bool(self.password)\n else:\n if (\n \"UNLOCK\" in os.environ\n and os.environ[\"UNLOCK\"]\n and self.config_key in self.config\n and self.config[self.config_key]\n ):\n log.debug(\"Trying to use environmental \" \"variable to unlock wallet\")\n self.unlock(os.environ.get(\"UNLOCK\"))\n return bool(self.password)\n return False", - "docstring": "Is the store unlocked so that I can decrypt the content?" 
- }, - { - "code": "def duplicated(values: Sequence):\n vals = pd.Series(values)\n return vals[vals.duplicated()]", - "docstring": "Return the duplicated items in `values`" - }, - { - "code": "def calculate_uuid(self):\n if self.uuid_input_fields is None:\n raise NotImplementedError(\n)\n if self.uuid_input_fields == \"RANDOM\":\n return uuid.uuid4().hex\n assert isinstance(self.uuid_input_fields, tuple), \"'uuid_input_fields' must either be a tuple or the string 'RANDOM'\"\n hashable_input_vals = []\n for field in self.uuid_input_fields:\n new_value = getattr(self, field)\n if new_value:\n hashable_input_vals.append(str(new_value))\n hashable_input = \":\".join(hashable_input_vals)\n if not hashable_input:\n return uuid.uuid4().hex\n return sha2_uuid(hashable_input)", - "docstring": "Should return a 32-digit hex string for a UUID that is calculated as a function of a set of fields from the model." - }, - { - "code": "def write_error_response(self, message):\n self.set_status(404)\n response = self.make_error_response(str(message))\n now = time.time()\n spent = now - self.basehandler_starttime\n response[constants.RESPONSE_KEY_EXECUTION_TIME] = spent\n self.write_json_response(response)", - "docstring": "Writes the message as part of the response and sets 404 status." - }, - { - "code": "def delete(self):\n url = PATHS['DELETE'] % self.id\n return self.api.post(url=url)", - "docstring": "Permanently delete an existing status update." - }, - { - "code": "def show_condition_operators(self, condition):\n permitted_operators = self.savedsearch.conditions_operators.get(condition)\n permitted_operators_list = set(\n [self.savedsearch.operators.get(op) for op in permitted_operators]\n )\n return permitted_operators_list", - "docstring": "Show available operators for a given saved search condition" - }, - { - "code": "def with_setup(self, colormode=None, colorpalette=None, extend_colors=False):\n colorful = Colorful(\n colormode=self.colorful.colormode,\n colorpalette=copy.copy(self.colorful.colorpalette)\n )\n colorful.setup(\n colormode=colormode, colorpalette=colorpalette, extend_colors=extend_colors\n )\n yield colorful", - "docstring": "Return a new Colorful object with the given color config." - }, - { - "code": "def _uptime_syllable():\n global __boottime\n try:\n __boottime = os.stat('/dev/pty/mst/pty0').st_mtime\n return time.time() - __boottime\n except (NameError, OSError):\n return None", - "docstring": "Returns uptime in seconds or None, on Syllable." - }, - { - "code": "def rosenbrock(theta):\n x, y = theta\n obj = (1 - x)**2 + 100 * (y - x**2)**2\n grad = np.zeros(2)\n grad[0] = 2 * x - 400 * (x * y - x**3) - 2\n grad[1] = 200 * (y - x**2)\n return obj, grad", - "docstring": "Objective and gradient for the rosenbrock function" - }, - { - "code": "def build(self, track, requester):\r\n try:\r\n self.track = track['track']\r\n self.identifier = track['info']['identifier']\r\n self.can_seek = track['info']['isSeekable']\r\n self.author = track['info']['author']\r\n self.duration = track['info']['length']\r\n self.stream = track['info']['isStream']\r\n self.title = track['info']['title']\r\n self.uri = track['info']['uri']\r\n self.requester = requester\r\n return self\r\n except KeyError:\r\n raise InvalidTrack('An invalid track was passed.')", - "docstring": "Returns an optional AudioTrack." 
- }, - { - "code": "def find_activations(graph: BELGraph):\n for u, v, key, data in graph.edges(keys=True, data=True):\n if u != v:\n continue\n bel = graph.edge_to_bel(u, v, data)\n line = data.get(LINE)\n if line is None:\n continue\n elif has_protein_modification_increases_activity(graph, u, v, key):\n print(line, '- pmod changes -', bel)\n find_related(graph, v, data)\n elif has_degradation_increases_activity(data):\n print(line, '- degradation changes -', bel)\n find_related(graph, v, data)\n elif has_translocation_increases_activity(data):\n print(line, '- translocation changes -', bel)\n find_related(graph, v, data)\n elif complex_increases_activity(graph, u, v, key):\n print(line, '- complex changes - ', bel)\n find_related(graph, v, data)\n elif has_same_subject_object(graph, u, v, key):\n print(line, '- same sub/obj -', bel)\n else:\n print(line, '- *** - ', bel)", - "docstring": "Find edges that are A - A, meaning that some conditions in the edge best describe the interaction." - }, - { - "code": "def play(self):\n if not self.is_playing():\n self.play_pause()\n self._is_playing = True\n self.playEvent(self)", - "docstring": "Play the video asynchronously returning control immediately to the calling code" - }, - { - "code": "def save_as(self):\n chooser = ShoebotFileChooserDialog(_('Save File'), None, Gtk.FileChooserAction.SAVE,\n (Gtk.STOCK_SAVE, Gtk.ResponseType.ACCEPT,\n Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL))\n chooser.set_do_overwrite_confirmation(True)\n chooser.set_transient_for(self)\n saved = chooser.run() == Gtk.ResponseType.ACCEPT\n if saved:\n old_filename = self.filename\n self.source_buffer.filename = chooser.get_filename()\n if not self.save():\n self.filename = old_filename\n chooser.destroy()\n return saved", - "docstring": "Return True if the buffer was saved" - }, - { - "code": "def contains_only(self, *items):\n if len(items) == 0:\n raise ValueError('one or more args must be given')\n else:\n extra = []\n for i in self.val:\n if i not in items:\n extra.append(i)\n if extra:\n self._err('Expected <%s> to contain only %s, but did contain %s.' % (self.val, self._fmt_items(items), self._fmt_items(extra)))\n missing = []\n for i in items:\n if i not in self.val:\n missing.append(i)\n if missing:\n self._err('Expected <%s> to contain only %s, but did not contain %s.' % (self.val, self._fmt_items(items), self._fmt_items(missing)))\n return self", - "docstring": "Asserts that val contains only the given item or items." 
- }, - { - "code": "def paginate_update(update):\n from happenings.models import Update\n time = update.pub_time\n event = update.event\n try:\n next = Update.objects.filter(\n event=event,\n pub_time__gt=time\n ).order_by('pub_time').only('title')[0]\n except:\n next = None\n try:\n previous = Update.objects.filter(\n event=event,\n pub_time__lt=time\n ).order_by('-pub_time').only('title')[0]\n except:\n previous = None\n return {'next': next, 'previous': previous, 'event': event}", - "docstring": "attempts to get next and previous on updates" - }, - { - "code": "def delete(self, repo, args=[]):\n result = None\n with cd(repo.rootdir):\n try:\n cmd = ['rm'] + list(args)\n result = {\n 'status': 'success',\n 'message': self._run(cmd)\n }\n except Exception as e:\n result = {\n 'status': 'error',\n 'message': str(e)\n }\n return result", - "docstring": "Delete files from the repo" - }, - { - "code": "def update_Broyden_J(self):\n CLOG.debug('Broyden update.')\n delta_vals = self.param_vals - self._last_vals\n delta_residuals = self.calc_residuals() - self._last_residuals\n nrm = np.sqrt(np.dot(delta_vals, delta_vals))\n direction = delta_vals / nrm\n vals = delta_residuals / nrm\n self._rank_1_J_update(direction, vals)\n self.JTJ = np.dot(self.J, self.J.T)", - "docstring": "Execute a Broyden update of J" - }, - { - "code": "def getnodefor(self, name):\n \"Return the node where the ``name`` would land to\"\n node = self._getnodenamefor(name)\n return {node: self.cluster['nodes'][node]}", - "docstring": "Return the node where the ``name`` would land to" - }, - { - "code": "def _linelength(self, x0, y0, x1, y1):\n a = pow(abs(x0 - x1), 2)\n b = pow(abs(y0 - y1), 2)\n return sqrt(a + b)", - "docstring": "Returns the length of the line." - }, - { - "code": "def mock_request():\n current_site = Site.objects.get_current()\n request = HttpRequest()\n request.META['SERVER_NAME'] = current_site.domain\n return request", - "docstring": "Generate a fake request object to allow oEmbeds to use context processors." - }, - { - "code": "def apply(self):\n sorted = self.order + self.keys()\n unique = []; [unique.append(x) for x in sorted if x not in unique]\n for node in self.graph.nodes:\n for s in unique:\n if self.has_key(s) and self[s](self.graph, node): \n node.style = s", - "docstring": "Check the rules for each node in the graph and apply the style." - }, - { - "code": "def run(self, raw_args):\n parser = self.parser\n args, kwargs = parser.parse_callback_args(raw_args)\n callback = kwargs.pop(\"main_callback\")\n if parser.has_injected_quiet():\n levels = kwargs.pop(\"quiet_inject\", \"\")\n logging.inject_quiet(levels)\n try:\n ret_code = callback(*args, **kwargs)\n ret_code = int(ret_code) if ret_code else 0\n except ArgError as e:\n echo.err(\"{}: error: {}\", parser.prog, str(e))\n ret_code = 2\n return ret_code", - "docstring": "parse and import the script, and then run the script's main function" - }, - { - "code": "def _uptime_amiga():\n global __boottime\n try:\n __boottime = os.stat('RAM:').st_ctime\n return time.time() - __boottime\n except (NameError, OSError):\n return None", - "docstring": "Returns uptime in seconds or None, on AmigaOS." 
- }, - { - "code": "def rmse(a, b):\n return np.sqrt(np.square(a - b).mean())", - "docstring": "Returns the root mean square error betwwen a and b" - }, - { - "code": "def isodate(datestamp=None, microseconds=False):\n datestamp = datestamp or datetime.datetime.now()\n if not microseconds:\n usecs = datetime.timedelta(microseconds=datestamp.microsecond)\n datestamp = datestamp - usecs\n return datestamp.isoformat(b' ' if PY2 else u' ')", - "docstring": "Return current or given time formatted according to ISO-8601." - }, - { - "code": "def cmd_status(opts):\n config = load_config(opts.config)\n b = get_blockade(config, opts)\n containers = b.status()\n print_containers(containers, opts.json)", - "docstring": "Print status of containers and networks" - }, - { - "code": "def pass_from_pipe(cls):\n is_pipe = not sys.stdin.isatty()\n return is_pipe and cls.strip_last_newline(sys.stdin.read())", - "docstring": "Return password from pipe if not on TTY, else False." - }, - { - "code": "def main(branch):\n try:\n output = subprocess.check_output(['git', 'rev-parse']).decode('utf-8')\n sys.stdout.write(output)\n except subprocess.CalledProcessError:\n return\n ensure_remote_branch_is_tracked(branch)\n subprocess.check_call(['git', 'checkout', '--quiet', branch])\n subprocess.check_call(['git', 'pull', '--quiet'])\n subprocess.check_call(['git', 'checkout', '--quiet', '%s~0' % branch])\n subprocess.check_call(['find', '.', '-name', '\"*.pyc\"', '-delete'])\n print('Your branch is up to date with branch \\'origin/%s\\'.' % branch)", - "docstring": "Checkout, update and branch from the specified branch." - }, - { - "code": "def angle(x1, y1, x2, y2):\n sign = 1.0\n usign = (x1*y2 - y1*x2)\n if usign < 0:\n sign = -1.0\n num = x1*x2 + y1*y2\n den = hypot(x1,y1) * hypot(x2,y2)\n ratio = min(max(num/den, -1.0), 1.0)\n return sign * degrees(acos(ratio))", - "docstring": "The angle in degrees between two vectors." - }, - { - "code": "def getRecord(self, n=None):\n if n is None:\n assert len(self.fields)>0\n n = self.fields[0].numRecords-1\n assert (all(field.numRecords>n for field in self.fields))\n record = [field.values[n] for field in self.fields]\n return record", - "docstring": "Returns the nth record" - }, - { - "code": "def save(self, entry, with_location=True, debug=False):\n entry_dict = {}\n if isinstance(entry, DayOneEntry):\n entry_dict = entry.as_dict()\n else:\n entry_dict = entry\n entry_dict['UUID'] = uuid.uuid4().get_hex()\n if with_location and not entry_dict['Location']:\n entry_dict['Location'] = self.get_location()\n if not all ((entry_dict['UUID'], entry_dict['Time Zone'],\n entry_dict['Entry Text'])):\n print \"You must provide: Time zone, UUID, Creation Date, Entry Text\"\n return False\n if debug is False:\n file_path = self._file_path(entry_dict['UUID'])\n plistlib.writePlist(entry_dict, file_path)\n else:\n plist = plistlib.writePlistToString(entry_dict)\n print plist\n return True", - "docstring": "Saves a DayOneEntry as a plist" - }, - { - "code": "def mtime(self):\n try:\n return self._stat.st_mtime\n except:\n self._stat = self.stat()\n return self.mtime", - "docstring": "Get most recent modify time in timestamp." 
- }, - { - "code": "def _sm_relieve_pain(self, *args, **kwargs):\n _logger.info(\n \"Ending the degradation for blockade %s\" % self._blockade_name)\n self._do_reset_all()\n millisec = random.randint(self._start_min_delay, self._start_max_delay)\n self._timer = threading.Timer(millisec/1000.0, self.event_timeout)\n self._timer.start()", - "docstring": "End the blockade event and return to a steady state" - }, - { - "code": "def _task_directory(self, job_id, task_id, task_attempt):\n dir_name = 'task' if task_id is None else str(task_id)\n if task_attempt:\n dir_name = '%s.%s' % (dir_name, task_attempt)\n return self._provider_root() + '/' + job_id + '/' + dir_name", - "docstring": "The local dir for staging files for that particular task." - }, - { - "code": "def nolist(self, account):\n assert callable(self.blockchain.account_whitelist)\n return self.blockchain.account_whitelist(account, lists=[], account=self)", - "docstring": "Remove an other account from any list of this account" - }, - { - "code": "def collect_genv(self, include_local=True, include_global=True):\n e = type(self.genv)()\n if include_global:\n e.update(self.genv)\n if include_local:\n for k, v in self.lenv.items():\n e['%s_%s' % (self.obj.name.lower(), k)] = v\n return e", - "docstring": "Returns a copy of the global environment with all the local variables copied back into it." - }, - { - "code": "def validate_state_locations(self):\n names = map(lambda loc: loc[\"name\"], self.locations)\n assert len(names) == len(set(names)), \"Names of state locations must be unique\"", - "docstring": "Names of all state locations must be unique." - }, - { - "code": "def check_md5sum_change(src_file):\n src_md5 = get_md5sum(src_file)\n src_md5_file = src_file + '.md5'\n src_file_changed = True\n if os.path.exists(src_md5_file):\n with open(src_md5_file, 'r') as file_checksum:\n ref_md5 = file_checksum.read()\n if src_md5 == ref_md5:\n src_file_changed = False\n if src_file_changed:\n with open(src_md5_file, 'w') as file_checksum:\n file_checksum.write(src_md5)\n return src_file_changed", - "docstring": "Returns True if src_file has a different md5sum" - }, - { - "code": "def show_variables_window(self):\n if self.var_window is None and self.bot._vars:\n self.var_window = VarWindow(self, self.bot, '%s variables' % (self.title or 'Shoebot'))\n self.var_window.window.connect(\"destroy\", self.var_window_closed)", - "docstring": "Show the variables window." - }, - { - "code": "async def send_chat_message(self, send_chat_message_request):\n response = hangouts_pb2.SendChatMessageResponse()\n await self._pb_request('conversations/sendchatmessage',\n send_chat_message_request, response)\n return response", - "docstring": "Send a chat message to a conversation." - }, - { - "code": "def forward_events_to(self, sink, include_source=False):\n assert isinstance(sink, Eventful), f'{sink.__class__.__name__} is not Eventful'\n self._forwards[sink] = include_source", - "docstring": "This forwards signal to sink" - }, - { - "code": "def _datetime_in_range(self, dt, dt_min=None, dt_max=None):\n dt = dt.replace(microsecond=0)\n if dt_min:\n dt_min = dt_min.replace(microsecond=0)\n else:\n dt_min = dsub_util.replace_timezone(datetime.datetime.min, pytz.utc)\n if dt_max:\n dt_max = dt_max.replace(microsecond=0)\n else:\n dt_max = dsub_util.replace_timezone(datetime.datetime.max, pytz.utc)\n return dt_min <= dt <= dt_max", - "docstring": "Determine if the provided time is within the range, inclusive." 
- }, - { - "code": "def remove_random_edge_until_has_leaves(self) -> None:\n while True:\n leaves = set(self.iter_leaves())\n if leaves:\n return\n self.remove_random_edge()", - "docstring": "Remove random edges until there is at least one leaf node." - }, - { - "code": "def emit(self, record):\n try:\n self.redis_client.publish(self.channel, self.format(record))\n except redis.RedisError:\n pass", - "docstring": "Publish record to redis logging channel" - }, - { - "code": "def _post_resource(self, url, body):\n headers = {\"Content-Type\": \"application/json\",\n \"Accept\": \"application/json\"}\n if self.token:\n headers[\"W-Token\"] = \"%s\" % self.token\n response = WhenIWork_DAO().postURL(url, headers, json.dumps(body))\n if not (response.status == 200 or response.status == 204):\n raise DataFailureException(url, response.status, response.data)\n return json.loads(response.data)", - "docstring": "When I Work POST method." - }, - { - "code": "def scaper_to_tag(annotation):\n annotation.namespace = 'tag_open'\n data = annotation.pop_data()\n for obs in data:\n annotation.append(time=obs.time, duration=obs.duration,\n confidence=obs.confidence, value=obs.value['label'])\n return annotation", - "docstring": "Convert scaper annotations to tag_open" - }, - { - "code": "def file(self, item, **kwargs):\n query_string = \"/{t}/{u}/items/{i}/file\".format(\n u=self.library_id, t=self.library_type, i=item.upper()\n )\n return self._build_query(query_string, no_params=True)", - "docstring": "Get the file from an specific item" - }, - { - "code": "def find_imports(self, pbds):\n imports = list(set(self.uses).difference(set(self.defines)))\n for imp in imports:\n for p in pbds:\n if imp in p.defines:\n self.imports.append(p.name)\n break\n self.imports = list(set(self.imports))\n for import_file in self.imports:\n self.lines.insert(2, 'import \"{}\";'.format(import_file))", - "docstring": "Find all missing imports in list of Pbd instances." - }, - { - "code": "def belns(keyword: str, file: TextIO, encoding: Optional[str], use_names: bool):\n directory = get_data_dir(keyword)\n obo_url = f'http://purl.obolibrary.org/obo/{keyword}.obo'\n obo_path = os.path.join(directory, f'{keyword}.obo')\n obo_cache_path = os.path.join(directory, f'{keyword}.obo.pickle')\n obo_getter = make_obo_getter(obo_url, obo_path, preparsed_path=obo_cache_path)\n graph = obo_getter()\n convert_obo_graph_to_belns(\n graph,\n file=file,\n encoding=encoding,\n use_names=use_names,\n )", - "docstring": "Write as a BEL namespace." - }, - { - "code": "def save_policy(self, path):\n with open(path, 'wb') as f:\n pickle.dump(self.policy, f)", - "docstring": "Pickles the current policy for later inspection." - }, - { - "code": "def parse(grid_str, mode=MODE_ZINC, charset='utf-8'):\n if isinstance(grid_str, six.binary_type):\n grid_str = grid_str.decode(encoding=charset)\n _parse = functools.partial(parse_grid, mode=mode,\n charset=charset)\n if mode == MODE_JSON:\n if isinstance(grid_str, six.string_types):\n grid_data = json.loads(grid_str)\n else:\n grid_data = grid_str\n if isinstance(grid_data, dict):\n return _parse(grid_data)\n else:\n return list(map(_parse, grid_data))\n else:\n return list(map(_parse, GRID_SEP.split(grid_str.rstrip())))", - "docstring": "Parse the given Zinc text and return the equivalent data." 
- }, - { - "code": "def _isobject(self, name, exist):\r\n if exist in [2, 5]:\r\n return False\r\n cmd = 'isobject(%s)' % name\r\n resp = self._engine.eval(cmd, silent=True).strip()\r\n return resp == 'ans = 1'", - "docstring": "Test whether the name is an object." - }, - { - "code": "def run(self):\n\t\tself.checkProperties()\n\t\tself.debug(\"[*] Iniciando escaneo de AtomShields con las siguientes propiedades. . . \")\n\t\tself.showScanProperties()\n\t\tself.loadConfig()\n\t\tinit_ts = datetime.now()\n\t\tcwd = os.getcwd()\n\t\tos.chdir(self.path)\n\t\tissues = self.executeCheckers()\n\t\tos.chdir(cwd)\n\t\tend_ts = datetime.now()\n\t\tduration = '{}'.format(end_ts - init_ts)\n\t\tfor plugin in issues.keys():\n\t\t\tvalue = issues[plugin]\n\t\t\tif isinstance(value, list):\n\t\t\t\tmap(self.saveIssue, value)\n\t\t\telse:\n\t\t\t\tself.saveIssue(value)\n\t\tprint \"\"\n\t\tself.executeReports()\n\t\tself.debug(\"\")\n\t\tself.debug(\"Duration: {t}\".format(t=duration))\n\t\tself.showSummary()\n\t\treturn self.issues", - "docstring": "Run a scan in the path setted." - }, - { - "code": "def dir_maker(path):\n directory = os.path.dirname(path)\n if directory != '' and not os.path.isdir(directory):\n try:\n os.makedirs(directory)\n except OSError as e:\n sys.exit('Failed to create directory: {}'.format(e))", - "docstring": "Create a directory if it does not exist." - }, - { - "code": "def autodiscover(self, url):\n headers, response = fetch_url(url)\n if headers['content-type'].split(';')[0] in ('application/json', 'text/javascript'):\n provider_data = json.loads(response)\n return self.store_providers(provider_data)", - "docstring": "Load up StoredProviders from url if it is an oembed scheme" - }, - { - "code": "def _fire(self, layers, the_plot):\n if the_plot.get('last_marauder_shot') == the_plot.frame: return\n the_plot['last_marauder_shot'] = the_plot.frame\n col = np.random.choice(np.nonzero(layers['X'].sum(axis=0))[0])\n row = np.nonzero(layers['X'][:, col])[0][-1] + 1\n self._teleport((row, col))", - "docstring": "Launches a new bolt from a random Marauder." 
- }, - { - "code": "def _update_yaw_and_pitch(self):\n front = Vector3([0.0, 0.0, 0.0])\n front.x = cos(radians(self.yaw)) * cos(radians(self.pitch))\n front.y = sin(radians(self.pitch))\n front.z = sin(radians(self.yaw)) * cos(radians(self.pitch))\n self.dir = vector.normalise(front)\n self.right = vector.normalise(vector3.cross(self.dir, self._up))\n self.up = vector.normalise(vector3.cross(self.right, self.dir))", - "docstring": "Updates the camera vectors based on the current yaw and pitch" - }, - { - "code": "def init(self):\n self.es.indices.create(index=self.params['index'], ignore=400)", - "docstring": "Create an Elasticsearch index if necessary" - }, - { - "code": "def normalize(self, string):\n return ''.join([self._normalize.get(x, x) for x in nfd(string)])", - "docstring": "Normalize the string according to normalization list" - }, - { - "code": "def camel(theta):\n x, y = theta\n obj = 2 * x ** 2 - 1.05 * x ** 4 + x ** 6 / 6 + x * y + y ** 2\n grad = np.array([\n 4 * x - 4.2 * x ** 3 + x ** 5 + y,\n x + 2 * y\n ])\n return obj, grad", - "docstring": "Three-hump camel function" - }, - { - "code": "def list_namespaces():\n print('{:30s}\\t{:40s}'.format('NAME', 'DESCRIPTION'))\n print('-' * 78)\n for sch in sorted(__NAMESPACE__):\n desc = __NAMESPACE__[sch]['description']\n desc = (desc[:44] + '..') if len(desc) > 46 else desc\n print('{:30s}\\t{:40s}'.format(sch, desc))", - "docstring": "Print out a listing of available namespaces" - }, - { - "code": "def fill_dups_arr(data):\n duplefiles = glob.glob(os.path.join(data.tmpdir, \"duples_*.tmp.npy\"))\n duplefiles.sort(key=lambda x: int(x.rsplit(\"_\", 1)[-1][:-8]))\n io5 = h5py.File(data.clust_database, 'r+')\n dfilter = io5[\"duplicates\"]\n init = 0\n for dupf in duplefiles:\n end = int(dupf.rsplit(\"_\", 1)[-1][:-8])\n inarr = np.load(dupf)\n dfilter[init:end] = inarr\n init += end-init\n LOGGER.info(\"all duplicates: %s\", dfilter[:].sum())\n io5.close()", - "docstring": "fills the duplicates array from the multi_muscle_align tmp files" - }, - { - "code": "def add_bpmn_files(self, filenames):\n for filename in filenames:\n f = open(filename, 'r')\n try:\n self.add_bpmn_xml(ET.parse(f), filename=filename)\n finally:\n f.close()", - "docstring": "Add all filenames in the given list to the parser's set." - }, - { - "code": "def _get_well_known_file():\n default_config_dir = os.getenv(_CLOUDSDK_CONFIG_ENV_VAR)\n if default_config_dir is None:\n if os.name == 'nt':\n try:\n default_config_dir = os.path.join(os.environ['APPDATA'],\n _CLOUDSDK_CONFIG_DIRECTORY)\n except KeyError:\n drive = os.environ.get('SystemDrive', 'C:')\n default_config_dir = os.path.join(drive, '\\\\',\n _CLOUDSDK_CONFIG_DIRECTORY)\n else:\n default_config_dir = os.path.join(os.path.expanduser('~'),\n '.config',\n _CLOUDSDK_CONFIG_DIRECTORY)\n return os.path.join(default_config_dir, _WELL_KNOWN_CREDENTIALS_FILE)", - "docstring": "Get the well known file produced by command 'gcloud auth login'." - }, - { - "code": "def subscribe(self, event, bet_ids):\n if not self._subscriptions.get(event):\n self._subscriptions[event] = set()\n self._subscriptions[event] = self._subscriptions[event].union(bet_ids)", - "docstring": "Subscribe to event for given bet ids." 
- }, - { - "code": "def chisq_red(self):\n if self._chisq_red is None:\n self._chisq_red = chisquare(self.y_unweighted.transpose(), _np.dot(self.X_unweighted, self.beta), self.y_error, ddof=3, verbose=False)\n return self._chisq_red", - "docstring": "The reduced chi-square of the linear least squares" - }, - { - "code": "def setup(app):\n if 'http' not in app.domains:\n httpdomain.setup(app)\n app.add_directive('autopyramid', RouteDirective)", - "docstring": "Hook the directives when Sphinx ask for it." - }, - { - "code": "def parse_course_key(course_identifier):\n try:\n course_run_key = CourseKey.from_string(course_identifier)\n except InvalidKeyError:\n return course_identifier\n return quote_plus(' '.join([course_run_key.org, course_run_key.course]))", - "docstring": "Return the serialized course key given either a course run ID or course key." - }, - { - "code": "def _build_stat(self, idx):\n nameordered = self.samples.keys()\n nameordered.sort()\n newdat = pd.DataFrame([self.samples[i].stats_dfs[idx] \\\n for i in nameordered], index=nameordered)\\\n .dropna(axis=1, how='all')\n return newdat", - "docstring": "Returns a data frame with Sample stats for each step" - }, - { - "code": "def send(self, dispatcher):\n if self.sent_complete:\n return\n sent = dispatcher.send(self.to_send)\n self.to_send = self.to_send[sent:]", - "docstring": "Sends this outgoing packet to dispatcher's socket" - }, - { - "code": "def _generate_assertion(self):\n now = int(time.time())\n payload = {\n 'aud': self.token_uri,\n 'scope': self._scopes,\n 'iat': now,\n 'exp': now + self.MAX_TOKEN_LIFETIME_SECS,\n 'iss': self._service_account_email,\n }\n payload.update(self._kwargs)\n return crypt.make_signed_jwt(self._signer, payload,\n key_id=self._private_key_id)", - "docstring": "Generate the assertion that will be used in the request." - }, - { - "code": "def find_standard_sakefile(settings):\n error = settings[\"error\"]\n if settings[\"customsake\"]:\n custom = settings[\"customsake\"]\n if not os.path.isfile(custom):\n error(\"Specified sakefile '{}' doesn't exist\", custom)\n sys.exit(1)\n return custom\n for name in [\"Sakefile\", \"Sakefile.yaml\", \"Sakefile.yml\"]:\n if os.path.isfile(name):\n return name\n error(\"Error: there is no Sakefile to read\")\n sys.exit(1)", - "docstring": "Returns the filename of the appropriate sakefile" - }, - { - "code": "def _in_git_repo():\n ret = temple.utils.shell('git rev-parse', stderr=subprocess.DEVNULL, check=False)\n return ret.returncode == 0", - "docstring": "Returns True if inside a git repo, False otherwise" - }, - { - "code": "def copy_attributes(source, destination, ignore_patterns=[]):\n for attr in _wildcard_filter(dir(source), *ignore_patterns):\n setattr(destination, attr, getattr(source, attr))", - "docstring": "Copy the attributes from a source object to a destination object." - }, - { - "code": "def create_joints(self):\n stack = ['root']\n while stack:\n parent = stack.pop()\n for child in self.hierarchy.get(parent, ()):\n stack.append(child)\n if parent not in self.bones:\n continue\n bone = self.bones[parent]\n body = [b for b in self.bodies if b.name == parent][0]\n for child in self.hierarchy.get(parent, ()):\n child_bone = self.bones[child]\n child_body = [b for b in self.bodies if b.name == child][0]\n shape = ('', 'hinge', 'universal', 'ball')[len(child_bone.dof)]\n self.joints.append(self.world.join(shape, body, child_body))", - "docstring": "Traverse the bone hierarchy and create physics joints." 
- }, - { - "code": "def lbl(axis, label, size=22):\n at = AnchoredText(label, loc=2, prop=dict(size=size), frameon=True)\n at.patch.set_boxstyle(\"round,pad=0.,rounding_size=0.0\")\n axis.add_artist(at)", - "docstring": "Put a figure label in an axis" - }, - { - "code": "def _generateExtraMetricSpecs(options):\n _metricSpecSchema = {'properties': {}}\n results = []\n for metric in options['metrics']:\n for propertyName in _metricSpecSchema['properties'].keys():\n _getPropertyValue(_metricSpecSchema, propertyName, metric)\n specString, label = _generateMetricSpecString(\n field=metric['field'],\n metric=metric['metric'],\n params=metric['params'],\n inferenceElement=\\\n metric['inferenceElement'],\n returnLabel=True)\n if metric['logged']:\n options['loggedMetrics'].append(label)\n results.append(specString)\n return results", - "docstring": "Generates the non-default metrics specified by the expGenerator params" - }, - { - "code": "def __get_menu_entries(self, kibiter_major):\n menu_entries = []\n for entry in self.panels_menu:\n if entry['source'] not in self.data_sources:\n continue\n parent_menu_item = {\n 'name': entry['name'],\n 'title': entry['name'],\n 'description': \"\",\n 'type': \"menu\",\n 'dashboards': []\n }\n for subentry in entry['menu']:\n try:\n dash_name = get_dashboard_name(subentry['panel'])\n except FileNotFoundError:\n logging.error(\"Can't open dashboard file %s\", subentry['panel'])\n continue\n child_item = {\n \"name\": subentry['name'],\n \"title\": subentry['name'],\n \"description\": \"\",\n \"type\": \"entry\",\n \"panel_id\": dash_name\n }\n parent_menu_item['dashboards'].append(child_item)\n menu_entries.append(parent_menu_item)\n return menu_entries", - "docstring": "Get the menu entries from the panel definition" - }, - { - "code": "def create_project(self):\n if os.path.exists(self._py):\n prj_dir = os.path.join(self._app_dir, self._project_name)\n if os.path.exists(prj_dir):\n if self._force:\n logging.warn('Removing existing project')\n shutil.rmtree(prj_dir)\n else:\n logging.warn('Found existing project; not creating (use --force to overwrite)')\n return\n logging.info('Creating project')\n p = subprocess.Popen('cd {0} ; {1} startproject {2} > /dev/null'.format(self._app_dir, self._ve_dir + os.sep + self._project_name + \\\n os.sep + 'bin' + os.sep + 'django-admin.py', self._project_name), \\\n shell=True)\n os.waitpid(p.pid, 0)\n else:\n logging.error('Unable to find Python interpreter in virtualenv')\n return", - "docstring": "Creates a base Django project" - }, - { - "code": "def ib64_patched(self, attrsD, contentparams):\n if attrsD.get(\"mode\", \"\") == \"base64\":\n return 0\n if self.contentparams[\"type\"].startswith(\"text/\"):\n return 0\n if self.contentparams[\"type\"].endswith(\"+xml\"):\n return 0\n if self.contentparams[\"type\"].endswith(\"/xml\"):\n return 0\n if self.contentparams[\"type\"].endswith(\"/json\"):\n return 0\n return 0", - "docstring": "Patch isBase64 to prevent Base64 encoding of JSON content" - }, - { - "code": "def tick(self):\n self.current += 1\n if self.current == self.factor:\n sys.stdout.write('+')\n sys.stdout.flush()\n self.current = 0", - "docstring": "Add one tick to progress bar" - }, - { - "code": "def sync_params(self):\n def _normalize(comps, param):\n vals = [c.get_values(param) for c in comps]\n diff = any([vals[i] != vals[i+1] for i in range(len(vals)-1)])\n if diff:\n for c in comps:\n c.set_values(param, vals[0])\n for param, comps in iteritems(self.lmap):\n if isinstance(comps, list) and 
len(comps) > 1:\n _normalize(comps, param)", - "docstring": "Ensure that shared parameters are the same value everywhere" - }, - { - "code": "def map(self, map_function):\n from heronpy.streamlet.impl.mapbolt import MapStreamlet\n map_streamlet = MapStreamlet(map_function, self)\n self._add_child(map_streamlet)\n return map_streamlet", - "docstring": "Return a new Streamlet by applying map_function to each element of this Streamlet." - }, - { - "code": "def _set_state(self, state):\n logger.debug(\" _set_state({0!r})\".format(state))\n self._state = state\n self._state_cond.notify()", - "docstring": "Set `_state` and notify any threads waiting for the change." - }, - { - "code": "def retract_project_bid(session, bid_id):\n headers = {\n 'Content-Type': 'application/x-www-form-urlencoded'\n }\n bid_data = {\n 'action': 'retract'\n }\n endpoint = 'bids/{}'.format(bid_id)\n response = make_put_request(session, endpoint, headers=headers,\n params_data=bid_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['status']\n else:\n json_data = response.json()\n raise BidNotRetractedException(message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Retract a bid on a project" - }, - { - "code": "def play_pause(self):\n self._player_interface.PlayPause()\n self._is_playing = not self._is_playing\n if self._is_playing:\n self.playEvent(self)\n else:\n self.pauseEvent(self)", - "docstring": "Pause playback if currently playing, otherwise start playing if currently paused." - }, - { - "code": "def course_enrollments(self, request, pk):\n enterprise_customer = self.get_object()\n serializer = serializers.EnterpriseCustomerCourseEnrollmentsSerializer(\n data=request.data,\n many=True,\n context={\n 'enterprise_customer': enterprise_customer,\n 'request_user': request.user,\n }\n )\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=HTTP_200_OK)\n return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)", - "docstring": "Creates a course enrollment for an EnterpriseCustomerUser." - }, - { - "code": "def _tile(self, n):\n pos = self._trans(self.pos[n])\n return Tile(pos, pos).pad(self.support_pad)", - "docstring": "Get the update tile surrounding particle `n`" - }, - { - "code": "def __getDictMetaInfo(self, inferenceElement, inferenceDict):\n fieldMetaInfo = []\n inferenceLabel = InferenceElement.getLabel(inferenceElement)\n if InferenceElement.getInputElement(inferenceElement):\n fieldMetaInfo.append(FieldMetaInfo(name=inferenceLabel+\".actual\",\n type=FieldMetaType.string,\n special = ''))\n keys = sorted(inferenceDict.keys())\n for key in keys:\n fieldMetaInfo.append(FieldMetaInfo(name=inferenceLabel+\".\"+str(key),\n type=FieldMetaType.string,\n special=''))\n return fieldMetaInfo", - "docstring": "Get field metadata information for inferences that are of dict type" - }, - { - "code": "def intersects(self, i):\n return self.start <= i.end and i.start <= self.end", - "docstring": "Returns true iff this interval intersects the interval i" - }, - { - "code": "def matrixToDicts(data):\n if \"float\" in str(type(data[0])):\n d={}\n for x in range(len(data)):\n d[data.dtype.names[x]]=data[x]\n return d\n l=[]\n for y in range(len(data)):\n d={}\n for x in range(len(data[y])):\n d[data.dtype.names[x]]=data[y][x]\n l.append(d)\n return l", - "docstring": "given a recarray, return it as a list of dicts."
- }, - { - "code": "def code(self, code):\n def decorator(exception):\n self[code] = exception\n return exception\n return decorator", - "docstring": "Decorator to associate a code to an exception" - }, - { - "code": "def cbuuid_to_uuid(cbuuid):\n data = cbuuid.data().bytes()\n template = '{:0>8}-0000-1000-8000-00805f9b34fb' if len(data) <= 4 else '{:0>32}'\n value = template.format(hexlify(data.tobytes()[:16]).decode('ascii'))\n return uuid.UUID(hex=value)", - "docstring": "Convert Objective-C CBUUID type to native Python UUID type." - }, - { - "code": "def newWallet(self, pwd):\n if self.created():\n raise WalletExists(\"You already have created a wallet!\")\n self.store.unlock(pwd)", - "docstring": "Create a new wallet database" - }, - { - "code": "def instantiate(repo, name=None, filename=None):\n default_transformers = repo.options.get('transformer', {})\n transformers = {}\n if name is not None:\n if name in default_transformers:\n transformers = {\n name : default_transformers[name]\n }\n else:\n transformers = {\n name : {\n 'files': [],\n }\n }\n else:\n transformers = default_transformers\n input_matching_files = None\n if filename is not None:\n input_matching_files = repo.find_matching_files([filename])\n for t in transformers:\n for k in transformers[t]:\n if \"files\" not in k:\n continue\n if k == \"files\" and input_matching_files is not None:\n transformers[t][k] = input_matching_files\n else:\n if transformers[t][k] is None or len(transformers[t][k]) == 0:\n transformers[t][k] = []\n else:\n matching_files = repo.find_matching_files(transformers[t][k])\n transformers[t][k] = matching_files\n return transformers", - "docstring": "Instantiate the generator and filename specification" - }, - { - "code": "def declare_browsable_routes(config):\n config.add_notfound_view(default_exceptionresponse_view,\n append_slash=True)\n add_route = config.add_route\n add_route('admin-index', '/a/')\n add_route('admin-moderation', '/a/moderation/')\n add_route('admin-api-keys', '/a/api-keys/')\n add_route('admin-add-site-messages', '/a/site-messages/',\n request_method='GET')\n add_route('admin-add-site-messages-POST', '/a/site-messages/',\n request_method='POST')\n add_route('admin-delete-site-messages', '/a/site-messages/',\n request_method='DELETE')\n add_route('admin-edit-site-message', '/a/site-messages/{id}/',\n request_method='GET')\n add_route('admin-edit-site-message-POST', '/a/site-messages/{id}/',\n request_method='POST')\n add_route('admin-content-status', '/a/content-status/')\n add_route('admin-content-status-single', '/a/content-status/{uuid}')\n add_route('admin-print-style', '/a/print-style/')\n add_route('admin-print-style-single', '/a/print-style/{style}')", - "docstring": "Declaration of routes that can be browsed by users." - }, - { - "code": "def add(self, *names):\n def decorator(blok):\n for name in names or (blok.__name__, ):\n self[name] = blok\n return blok\n return decorator", - "docstring": "Returns back a class decorator that enables registering Blox to this factory" - }, - { - "code": "def keys(self):\n return self.options.keys() + [p.name for p in self.positional_args]", - "docstring": "List names of options and positional arguments." 
- }, - { - "code": "def config_dir(self):\n home = expanduser('~')\n config_dir = os.path.join(home, '.jackal')\n return config_dir", - "docstring": "Returns the configuration directory" - }, - { - "code": "def schema(args):\n try:\n import south\n cmd = args and 'schemamigration %s' % ' '.join(options.args) or 'schemamigration'\n call_manage(cmd)\n except ImportError:\n error('Could not import south.')", - "docstring": "Run South's schemamigration command." - }, - { - "code": "def _expand_tuple(path_cfg, alias_dict, overriding_kargs):\n new_path_cfg = path_cfg[0]\n new_overriding_kargs = path_cfg[1].copy()\n new_overriding_kargs.update(overriding_kargs)\n return expand_path_cfg(\n new_path_cfg,\n overriding_kargs=new_overriding_kargs,\n alias_dict=alias_dict\n )", - "docstring": "expand a path config given as a tuple" - }, - { - "code": "def delete_vacation(_id):\n arequest = requests.delete(VACATIONS_URL + \"/\" + _id, headers=HEADERS)\n status_code = str(arequest.status_code)\n if status_code != '202':\n _LOGGER.error(\"Failed to delete vacation. \" + status_code)\n return False\n return True", - "docstring": "Delete a vacation by ID." - }, - { - "code": "def parse(self):\n if exists(self.filepath):\n content = open(self.filepath).read().decode(charset)\n else:\n content = \"\"\n try:\n config = toml.loads(content)\n except toml.TomlSyntaxError:\n raise ConfigSyntaxError\n return config", - "docstring": "parse config, return a dict" - }, - { - "code": "def _should_really_index(self, instance):\n if self._should_index_is_method:\n is_method = inspect.ismethod(self.should_index)\n try:\n count_args = len(inspect.signature(self.should_index).parameters)\n except AttributeError:\n count_args = len(inspect.getargspec(self.should_index).args)\n if is_method or count_args is 1:\n return self.should_index(instance)\n else:\n return self.should_index()\n else:\n attr_type = type(self.should_index)\n if attr_type is DeferredAttribute:\n attr_value = self.should_index.__get__(instance, None)\n elif attr_type is str:\n attr_value = getattr(instance, self.should_index)\n elif attr_type is property:\n attr_value = self.should_index.__get__(instance)\n else:\n raise AlgoliaIndexError('{} should be a boolean attribute or a method that returns a boolean.'.format(\n self.should_index))\n if type(attr_value) is not bool:\n raise AlgoliaIndexError(\"%s's should_index (%s) should be a boolean\" % (\n instance.__class__.__name__, self.should_index))\n return attr_value", - "docstring": "Return True if according to should_index the object should be indexed." - }, - { - "code": "def lookup_api_key_info():\n info = {}\n with db_connect() as conn:\n with conn.cursor() as cursor:\n cursor.execute(ALL_KEY_INFO_SQL_STMT)\n for row in cursor.fetchall():\n id, key, name, groups = row\n user_id = \"api_key:{}\".format(id)\n info[key] = dict(id=id, user_id=user_id,\n name=name, groups=groups)\n return info", - "docstring": "Given a dbapi cursor, lookup all the api keys and their information." - }, - { - "code": "def selected_exercise(func):\n @wraps(func)\n def inner(*args, **kwargs):\n exercise = Exercise.get_selected()\n return func(exercise, *args, **kwargs)\n return inner", - "docstring": "Passes the selected exercise as the first argument to func." - }, - { - "code": "async def qtm_version(self):\n return await asyncio.wait_for(\n self._protocol.send_command(\"qtmversion\"), timeout=self._timeout\n )", - "docstring": "Get the QTM version." 
- }, - { - "code": "def _set_options(self, options):\n if not options:\n return self.options.copy()\n options = options.copy()\n if 'magic' in options:\n self.set_magic(options['magic'])\n del(options['magic'])\n if 'flags' in options:\n flags = options['flags']\n del(options['flags'])\n for key, value in flags.iteritems():\n if not isinstance(value, bool):\n raise TypeError('Invalid flag type for: %s' % key)\n else:\n flags = self.options['flags']\n if 'info' in options:\n del(options['info'])\n for key, value in options.iteritems():\n if not isinstance(value, int):\n raise TypeError('Invalid option type for: %s' % key)\n if value < 0 or value > 255:\n raise ValueError('Option value out of range for: %s' % key)\n new_options = self.options.copy()\n new_options.update(options)\n new_options['flags'].update(flags)\n return new_options", - "docstring": "Private function for setting options used for sealing" - }, - { - "code": "def typecasted(func):\n signature = inspect.signature(func).parameters.items()\n @wraps(func)\n def wrapper(*args, **kwargs):\n args = list(args)\n new_args = []\n new_kwargs = {}\n for _, param in signature:\n converter = param.annotation\n if converter is inspect._empty:\n converter = lambda a: a\n if param.kind is param.POSITIONAL_OR_KEYWORD:\n if args:\n to_conv = args.pop(0)\n new_args.append(converter(to_conv))\n elif param.kind is param.VAR_POSITIONAL:\n for a in args:\n new_args.append(converter(a))\n else:\n for k, v in kwargs.items():\n nk, nv = converter(k, v)\n new_kwargs[nk] = nv\n return func(*new_args, **new_kwargs)\n return wrapper", - "docstring": "Decorator that converts arguments via annotations." - }, - { - "code": "def active_env_module_resolver(resolver, path):\n from .api import get_active_env\n env = get_active_env()\n if not env:\n raise ResolveError\n mod = env.get_module(path)\n if not mod:\n raise ResolveError\n return mod", - "docstring": "Resolves modules in currently active environment." - }, - { - "code": "def update_cnum(self):\n if \"ConferencePaper\" not in self.collections:\n cnums = record_get_field_values(self.record, '773', code=\"w\")\n for cnum in cnums:\n cnum_subs = [\n (\"9\", \"INSPIRE-CNUM\"),\n (\"a\", cnum)\n ]\n record_add_field(self.record, \"035\", subfields=cnum_subs)", - "docstring": "Check if we shall add cnum in 035." - }, - { - "code": "def verify_authority(self):\n try:\n if not self.blockchain.rpc.verify_authority(self.json()):\n raise InsufficientAuthorityError\n except Exception as e:\n raise e", - "docstring": "Verify the authority of the signed transaction" - }, - { - "code": "def split_multiline(value):\n return [element for element in (line.strip() for line in value.split('\\n'))\n if element]", - "docstring": "Split a multiline string into a list, excluding blank lines." - }, - { - "code": "def can_sequence(obj):\n if istype(obj, sequence_types):\n t = type(obj)\n return t([can(i) for i in obj])\n else:\n return obj", - "docstring": "Can the elements of a sequence." 
- }, - { - "code": "def log_file(self, url=None):\n if url is None:\n url = self.url\n f = re.sub(\"file://\", \"\", url)\n try:\n with open(f, \"a\") as of:\n of.write(str(self.store.get_json_tuples(True)))\n except IOError as e:\n print(e)\n print(\"Could not write the content to the file..\")", - "docstring": "Write to a local log file" - }, - { - "code": "def normalize_cutoff(model, zero_cutoff=None):\n if zero_cutoff is None:\n return model.tolerance\n else:\n if zero_cutoff < model.tolerance:\n raise ValueError(\n \"The chosen zero cutoff cannot be less than the model's \"\n \"tolerance value.\"\n )\n else:\n return zero_cutoff", - "docstring": "Return a valid zero cutoff value." - }, - { - "code": "def barv(d, plt, title=None, rotation='vertical'):\n labels = sorted(d, key=d.get, reverse=True)\n index = range(len(labels))\n plt.xticks(index, labels, rotation=rotation)\n plt.bar(index, [d[v] for v in labels])\n if title is not None:\n plt.title(title)", - "docstring": "A convenience function for plotting a vertical bar plot from a Counter" - }, - { - "code": "def right_complement(clr):\n right = split_complementary(clr)[2]\n colors = complementary(clr)\n colors[3].h = right.h\n colors[4].h = right.h\n colors[5].h = right.h\n colors = colorlist(\n colors[0], colors[2], colors[1], colors[5], colors[4], colors[3]\n )\n return colors", - "docstring": "Returns the right half of the split complement." - }, - { - "code": "def PrintErrorCounts(self):\n for category, count in sorted(iteritems(self.errors_by_category)):\n self.PrintInfo('Category \\'%s\\' errors found: %d\\n' %\n (category, count))\n if self.error_count > 0:\n self.PrintInfo('Total errors found: %d\\n' % self.error_count)", - "docstring": "Print a summary of errors by category, and the total." - }, - { - "code": "def from_spec(spec, kwargs=None):\n memory = util.get_object(\n obj=spec,\n predefined_objects=tensorforce.core.memories.memories,\n kwargs=kwargs\n )\n assert isinstance(memory, Memory)\n return memory", - "docstring": "Creates a memory from a specification dict." - }, - { - "code": "def init(self):\n \"Initialize the message-digest and set all fields to zero.\"\n self.length = 0\n self.input = []\n self.H0 = 0x67452301\n self.H1 = 0xEFCDAB89\n self.H2 = 0x98BADCFE\n self.H3 = 0x10325476\n self.H4 = 0xC3D2E1F0", - "docstring": "Initialize the message-digest and set all fields to zero." - }, - { - "code": "def count_author_publications(graph: BELGraph) -> typing.Counter[str]:\n authors = group_as_dict(_iter_author_publiations(graph))\n return Counter(count_dict_values(count_defaultdict(authors)))", - "docstring": "Count the number of publications of each author to the given graph." 
- }, - { - "code": "def fillRGB(self, r, g, b, start=0, end=-1):\n self.fill((r, g, b), start, end)", - "docstring": "Fill entire strip by giving individual RGB values instead of tuple" - }, - { - "code": "def plot_shaded_data(X,Y,variances,varianceX):\n plt.plot(X,Y,color='k',lw=2)\n nChunks=int(len(Y)/CHUNK_POINTS)\n for i in range(0,100,PERCENT_STEP):\n varLimitLow=np.percentile(variances,i)\n varLimitHigh=np.percentile(variances,i+PERCENT_STEP)\n varianceIsAboveMin=np.where(variances>=varLimitLow)[0]\n varianceIsBelowMax=np.where(variances<=varLimitHigh)[0]\n varianceIsRange=[chunkNumber for chunkNumber in range(nChunks) \\\n if chunkNumber in varianceIsAboveMin \\\n and chunkNumber in varianceIsBelowMax]\n for chunkNumber in varianceIsRange:\n t1=chunkNumber*CHUNK_POINTS/POINTS_PER_SEC\n t2=t1+CHUNK_POINTS/POINTS_PER_SEC\n plt.axvspan(t1,t2,alpha=.3,color=COLORMAP(i/100),lw=0)", - "docstring": "plot X and Y data, then shade its background by variance." - }, - { - "code": "def update_hidden_notes(self):\n if not self.tag_as_cern:\n notes = record_get_field_instances(self.record,\n tag=\"595\")\n for field in notes:\n for dummy, value in field[0]:\n if value == \"CDS\":\n self.tag_as_cern = True\n record_delete_fields(self.record, tag=\"595\")", - "docstring": "Remove hidden notes and tag a CERN if detected." - }, - { - "code": "def paginator(self):\n if not hasattr(self, '_paginator'):\n if self.pagination_class is None:\n self._paginator = None\n else:\n self._paginator = self.pagination_class()\n return self._paginator", - "docstring": "The paginator instance associated with the view, or `None`." - }, - { - "code": "def _initfile(path, data=\"dict\"):\n data = {} if data.lower() == \"dict\" else []\n if not os.path.exists(path):\n dirname = os.path.dirname(path)\n if dirname and not os.path.exists(dirname):\n raise IOError(\n (\"Could not initialize empty JSON file in non-existant \"\n \"directory '{}'\").format(os.path.dirname(path))\n )\n with open(path, \"w\") as f:\n json.dump(data, f)\n return True\n elif os.path.getsize(path) == 0:\n with open(path, \"w\") as f:\n json.dump(data, f)\n else:\n return False", - "docstring": "Initialize an empty JSON file." - }, - { - "code": "def accept_project_bid(session, bid_id):\n headers = {\n 'Content-Type': 'application/x-www-form-urlencoded'\n }\n bid_data = {\n 'action': 'accept'\n }\n endpoint = 'bids/{}'.format(bid_id)\n response = make_put_request(session, endpoint, headers=headers,\n params_data=bid_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['status']\n else:\n json_data = response.json()\n raise BidNotAcceptedException(message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Accept a bid on a project" - }, - { - "code": "def _is_action_available_left(self, state):\n for row in range(4):\n has_empty = False\n for col in range(4):\n has_empty |= state[row, col] == 0\n if state[row, col] != 0 and has_empty:\n return True\n if (state[row, col] != 0 and col > 0 and\n state[row, col] == state[row, col - 1]):\n return True\n return False", - "docstring": "Determines whether action 'Left' is available." - }, - { - "code": "def connected_channel(self):\r\n if not self.channel_id:\r\n return None\r\n return self._lavalink.bot.get_channel(int(self.channel_id))", - "docstring": "Returns the voice channel the player is connected to." 
- }, - { - "code": "def rename(self, name):\n self._impl.system.rename_model(new_name=name, old_name=self.name)", - "docstring": "Rename the model itself" - }, - { - "code": "def sys_transmit(self, cpu, fd, buf, count, tx_bytes):\n if issymbolic(fd):\n logger.info(\"Ask to write to a symbolic file descriptor!!\")\n cpu.PC = cpu.PC - cpu.instruction.size\n raise SymbolicSyscallArgument(cpu, 0)\n if issymbolic(buf):\n logger.info(\"Ask to write to a symbolic buffer\")\n cpu.PC = cpu.PC - cpu.instruction.size\n raise SymbolicSyscallArgument(cpu, 1)\n if issymbolic(count):\n logger.info(\"Ask to write a symbolic number of bytes \")\n cpu.PC = cpu.PC - cpu.instruction.size\n raise SymbolicSyscallArgument(cpu, 2)\n if issymbolic(tx_bytes):\n logger.info(\"Ask to return size to a symbolic address \")\n cpu.PC = cpu.PC - cpu.instruction.size\n raise SymbolicSyscallArgument(cpu, 3)\n return super().sys_transmit(cpu, fd, buf, count, tx_bytes)", - "docstring": "Symbolic version of Decree.sys_transmit" - }, - { - "code": "def _bib_processor(self, retrieved):\n items = []\n for bib in retrieved.entries:\n items.append(bib[\"content\"][0][\"value\"])\n self.url_params = None\n return items", - "docstring": "Return a list of strings formatted as HTML bibliography entries" - }, - { - "code": "def cross_check_launchers(self, launchers):\n if len(launchers) == 0: raise Exception('Empty launcher list')\n timestamps = [launcher.timestamp for launcher in launchers]\n if not all(timestamps[0] == tstamp for tstamp in timestamps):\n raise Exception(\"Launcher timestamps not all equal. \"\n \"Consider setting timestamp explicitly.\")\n root_directories = []\n for launcher in launchers:\n command = launcher.command\n args = launcher.args\n command.verify(args)\n root_directory = launcher.get_root_directory()\n if os.path.isdir(root_directory):\n raise Exception(\"Root directory already exists: %r\" % root_directory)\n if root_directory in root_directories:\n raise Exception(\"Each launcher requires a unique root directory\")\n root_directories.append(root_directory)", - "docstring": "Performs consistency checks across all the launchers." - }, - { - "code": "def suppose(self, var, value):\n \"Start accumulating inferences from assuming var=value.\"\n self.support_pruning()\n removals = [(var, a) for a in self.curr_domains[var] if a != value]\n self.curr_domains[var] = [value]\n return removals", - "docstring": "Start accumulating inferences from assuming var=value." - }, - { - "code": "def top(self):\n o = self.get_ordering_queryset().aggregate(Min('order')).get('order__min')\n self.to(o)", - "docstring": "Move this object to the top of the ordered stack." - }, - { - "code": "def _step4func(self, samples, force, ipyclient):\n if self._headers:\n print(\"\\n Step 4: Joint estimation of error rate and heterozygosity\")\n samples = _get_samples(self, samples)\n if not self._samples_precheck(samples, 4, force):\n raise IPyradError(FIRST_RUN_3)\n elif not force:\n if all([i.stats.state >= 4 for i in samples]):\n print(JOINTS_EXIST.format(len(samples)))\n return\n assemble.jointestimate.run(self, samples, force, ipyclient)", - "docstring": "hidden wrapped function to start step 4" - }, - { - "code": "def kill(self):\n assert self.has_started(), \"called kill() on a non-active GeventLoop\"\n self._stop_event.set()\n self._greenlet.kill()\n self._clear()", - "docstring": "Kills the running loop and waits till it gets killed." 
- }, - { - "code": "def _remove_io_handler(self, handler):\n if handler not in self.io_handlers:\n return\n self.io_handlers.remove(handler)\n for thread in self.io_threads:\n if thread.io_handler is handler:\n thread.stop()", - "docstring": "Remove an IOHandler from the pool." - }, - { - "code": "def update_J(self):\n self.calc_J()\n step = np.ceil(1e-2 * self.J.shape[1]).astype('int')\n self.JTJ = low_mem_sq(self.J, step=step)\n self._fresh_JTJ = True\n self._J_update_counter = 0\n if np.any(np.isnan(self.JTJ)):\n raise FloatingPointError('J, JTJ have nans.')\n self._exp_err = self.error - self.find_expected_error(delta_params='perfect')", - "docstring": "Updates J, JTJ, and internal counters." - }, - { - "code": "def combine_files(self, f1, f2, f3):\n with open(os.path.join(self.datadir, f3), 'wb') as new_file:\n with open(os.path.join(self.datadir, f1), 'rb') as file_1:\n new_file.write(file_1.read())\n with open(os.path.join(self.datadir, f2), 'rb') as file_2:\n new_file.write(file_2.read())", - "docstring": "Combines the files 1 and 2 into 3." - }, - { - "code": "def update_missing(**kwargs):\n data_path = os.environ.get(BBG_ROOT, '').replace('\\\\', '/')\n if not data_path: return\n if len(kwargs) == 0: return\n log_path = f'{data_path}/Logs/{missing_info(**kwargs)}'\n cnt = len(files.all_files(log_path)) + 1\n files.create_folder(log_path)\n open(f'{log_path}/{cnt}.log', 'a').close()", - "docstring": "Update number of trials for missing values" - }, - { - "code": "def end(self):\n return Range(self.source_buffer, self.end_pos, self.end_pos,\n expanded_from=self.expanded_from)", - "docstring": "Returns a zero-length range located just after the end of this range." - }, - { - "code": "def userFolder():\n path=os.path.expanduser(\"~\")+\"/.swhlab/\"\n if not os.path.exists(path):\n print(\"creating\",path)\n os.mkdir(path)\n return os.path.abspath(path)", - "docstring": "return the semi-temporary user folder" - }, - { - "code": "def make_request_data(self, zipcode, city, state):\n data = {'key': self.api_key,\n 'postalcode': str(zipcode),\n 'city': city,\n 'state': state\n }\n data = ZipTaxClient._clean_request_data(data)\n return data", - "docstring": "Make the request params given location data" - }, - { - "code": "def check(self):\n if not self.is_valid:\n raise PolyaxonDeploymentConfigError(\n 'Deployment type `{}` not supported'.format(self.deployment_type))\n check = False\n if self.is_kubernetes:\n check = self.check_for_kubernetes()\n elif self.is_docker_compose:\n check = self.check_for_docker_compose()\n elif self.is_docker:\n check = self.check_for_docker()\n elif self.is_heroku:\n check = self.check_for_heroku()\n if not check:\n raise PolyaxonDeploymentConfigError(\n 'Deployment `{}` is not valid'.format(self.deployment_type))", - "docstring": "Add platform specific checks" - }, - { - "code": "def fileModifiedTimestamp(fname):\r\n modifiedTime=os.path.getmtime(fname)\r\n stamp=time.strftime('%Y-%m-%d', time.localtime(modifiedTime))\r\n return stamp", - "docstring": "return \"YYYY-MM-DD\" when the file was modified." 
- }, - { - "code": "def write(context):\n config = context.obj\n title = click.prompt('Title')\n author = click.prompt('Author', default=config.get('DEFAULT_AUTHOR'))\n slug = slugify(title)\n creation_date = datetime.now()\n basename = '{:%Y-%m-%d}_{}.md'.format(creation_date, slug)\n meta = (\n ('Title', title),\n ('Date', '{:%Y-%m-%d %H:%M}:00'.format(creation_date)),\n ('Modified', '{:%Y-%m-%d %H:%M}:00'.format(creation_date)),\n ('Author', author),\n )\n file_content = ''\n for key, value in meta:\n file_content += '{}: {}\\n'.format(key, value)\n file_content += '\\n\\n'\n file_content += 'Text...\\n\\n'\n file_content += '![image description]({filename}/images/my-photo.jpg)\\n\\n'\n file_content += 'Text...\\n\\n'\n os.makedirs(config['CONTENT_DIR'], exist_ok=True)\n path = os.path.join(config['CONTENT_DIR'], basename)\n with click.open_file(path, 'w') as f:\n f.write(file_content)\n click.echo(path)\n click.launch(path)", - "docstring": "Starts a new article" - }, - { - "code": "def available_templates(value):\n templates = list_templates()\n if value not in templates:\n raise ArgumentTypeError(\"Effect template '{}' does not exist.\\n Available templates: {} \".format(\n value, \", \".join(templates)))\n return value", - "docstring": "Scan for available templates in effect_templates" - }, - { - "code": "def _blocks(self, name):\n i = len(self)\n while i >= 0:\n i -= 1\n if name in self[i]['__names__']:\n for b in self[i]['__blocks__']:\n r = b.raw()\n if r and r == name:\n return b\n else:\n for b in self[i]['__blocks__']:\n r = b.raw()\n if r and name.startswith(r):\n b = utility.blocksearch(b, name)\n if b:\n return b\n return False", - "docstring": "Inner wrapper to search for blocks by name." - }, - { - "code": "def _build_chunk_headers(self):\n if hasattr(self, \"_chunk_headers\") and self._chunk_headers:\n return\n self._chunk_headers = {}\n for field in self._files:\n self._chunk_headers[field] = self._headers(field, True)\n for field in self._data:\n self._chunk_headers[field] = self._headers(field)", - "docstring": "Build headers for each field." - }, - { - "code": "def preview(self, components=None, ask=0):\n ask = int(ask)\n self.init()\n component_order, plan_funcs = self.get_component_funcs(components=components)\n print('\\n%i changes found for host %s.\\n' % (len(component_order), self.genv.host_string))\n if component_order and plan_funcs:\n if self.verbose:\n print('These components have changed:\\n')\n for component in sorted(component_order):\n print((' '*4)+component)\n print('Deployment plan for host %s:\\n' % self.genv.host_string)\n for func_name, _ in plan_funcs:\n print(success_str((' '*4)+func_name))\n if component_order:\n print()\n if ask and self.genv.host_string == self.genv.hosts[-1]:\n if component_order:\n if not raw_input('Begin deployment? [yn] ').strip().lower().startswith('y'):\n sys.exit(0)\n else:\n sys.exit(0)", - "docstring": "Inspects differences between the last deployment and the current code state." - }, - { - "code": "def create_session(self):\n req_url = '%s/%s' % (self.__webfsapi, 'CREATE_SESSION')\n sid = yield from self.__session.get(req_url, params=dict(pin=self.pin),\n timeout = self.timeout)\n text = yield from sid.text(encoding='utf-8')\n doc = objectify.fromstring(text)\n return doc.sessionId.text", - "docstring": "Create a session on the frontier silicon device." 
- }, - { - "code": "def write_temple_config(temple_config, template, version):\n with open(temple.constants.TEMPLE_CONFIG_FILE, 'w') as temple_config_file:\n versioned_config = {\n **temple_config,\n **{'_version': version, '_template': template},\n }\n yaml.dump(versioned_config, temple_config_file, Dumper=yaml.SafeDumper)", - "docstring": "Writes the temple YAML configuration" - }, - { - "code": "async def set_focus(self, set_focus_request):\n response = hangouts_pb2.SetFocusResponse()\n await self._pb_request('conversations/setfocus',\n set_focus_request, response)\n return response", - "docstring": "Set focus to a conversation." - }, - { - "code": "def _validate_job_and_task_arguments(job_params, task_descriptors):\n if not task_descriptors:\n return\n task_params = task_descriptors[0].task_params\n from_jobs = {label.name for label in job_params['labels']}\n from_tasks = {label.name for label in task_params['labels']}\n intersect = from_jobs & from_tasks\n if intersect:\n raise ValueError(\n 'Names for labels on the command-line and in the --tasks file must not '\n 'be repeated: {}'.format(','.join(intersect)))\n from_jobs = {\n item.name\n for item in job_params['envs'] | job_params['inputs']\n | job_params['outputs']\n }\n from_tasks = {\n item.name\n for item in task_params['envs'] | task_params['inputs']\n | task_params['outputs']\n }\n intersect = from_jobs & from_tasks\n if intersect:\n raise ValueError(\n 'Names for envs, inputs, and outputs on the command-line and in the '\n '--tasks file must not be repeated: {}'.format(','.join(intersect)))", - "docstring": "Validates that job and task argument names do not overlap." - }, - { - "code": "def count_list(the_list):\n count = the_list.count\n result = [(item, count(item)) for item in set(the_list)]\n result.sort()\n return result", - "docstring": "Generates a count of the number of times each unique item appears in a list" - }, - { - "code": "def text(what=\"sentence\", *args, **kwargs):\n if what == \"character\":\n return character(*args, **kwargs)\n elif what == \"characters\":\n return characters(*args, **kwargs)\n elif what == \"word\":\n return word(*args, **kwargs)\n elif what == \"words\":\n return words(*args, **kwargs)\n elif what == \"sentence\":\n return sentence(*args, **kwargs)\n elif what == \"sentences\":\n return sentences(*args, **kwargs)\n elif what == \"paragraph\":\n return paragraph(*args, **kwargs)\n elif what == \"paragraphs\":\n return paragraphs(*args, **kwargs)\n elif what == \"title\":\n return title(*args, **kwargs)\n else:\n raise NameError('No such method')", - "docstring": "An aggregator for all above defined public methods." - }, - { - "code": "def register_receivers(app, config):\n for event_name, event_config in config.items():\n event_builders = [\n obj_or_import_string(func)\n for func in event_config.get('event_builders', [])\n ]\n signal = obj_or_import_string(event_config['signal'])\n signal.connect(\n EventEmmiter(event_name, event_builders), sender=app, weak=False\n )", - "docstring": "Register signal receivers which send events." 
- }, - { - "code": "def _finalize_stats(self, ipyclient):\n print(FINALTREES.format(opr(self.trees.tree)))\n if self.params.nboots:\n self._compute_tree_stats(ipyclient)\n print(BOOTTREES.format(opr(self.trees.cons), opr(self.trees.boots))) \n if len(self.samples) < 20:\n if self.params.nboots:\n wctre = ete3.Tree(self.trees.cons, format=0)\n wctre.ladderize()\n print(wctre.get_ascii(show_internal=True, \n attributes=[\"dist\", \"name\"]))\n print(\"\")\n else:\n qtre = ete3.Tree(self.trees.tree, format=0)\n qtre.ladderize()\n print(qtre.get_ascii())\n print(\"\")\n docslink = \"https://toytree.readthedocs.io/\" \n citelink = \"https://ipyrad.readthedocs.io/tetrad.html\"\n print(LINKS.format(docslink, citelink))", - "docstring": "write final tree files" - }, - { - "code": "def dispatch(self, message, source = None):\n msgtype = \"\"\n try:\n if type(message[0]) == str:\n address = message[0]\n self.callbacks[address](message)\n elif type(message[0]) == list:\n for msg in message:\n self.dispatch(msg)\n except KeyError, key:\n print 'address %s not found, %s: %s' % (address, key, message)\n pprint.pprint(message)\n except IndexError, e:\n print '%s: %s' % (e, message)\n pass\n except None, e:\n print \"Exception in\", address, \"callback :\", e\n return", - "docstring": "Sends decoded OSC data to an appropriate calback" - }, - { - "code": "def _get_elements(self):\n for index, el in enumerate(self._elements):\n if isinstance(el, tuple):\n el = PathElement(*el)\n self._elements[index] = el\n yield el", - "docstring": "Yields all elements as PathElements" - }, - { - "code": "def delete(self, key_name):\n self.db.remove(Query().name == key_name)\n return self.get(key_name) == {}", - "docstring": "Delete the key and return true if the key was deleted, else false" - }, - { - "code": "def nn(input, layers_sizes, reuse=None, flatten=False, name=\"\"):\n for i, size in enumerate(layers_sizes):\n activation = tf.nn.relu if i < len(layers_sizes) - 1 else None\n input = tf.layers.dense(inputs=input,\n units=size,\n kernel_initializer=tf.contrib.layers.xavier_initializer(),\n reuse=reuse,\n name=name + '_' + str(i))\n if activation:\n input = activation(input)\n if flatten:\n assert layers_sizes[-1] == 1\n input = tf.reshape(input, [-1])\n return input", - "docstring": "Creates a simple neural network" - }, - { - "code": "def create_connection(conf):\n host_config = {}\n host_config['hosts'] = [conf.get('jackal', 'host')]\n if int(conf.get('jackal', 'use_ssl')):\n host_config['use_ssl'] = True\n if conf.get('jackal', 'ca_certs'):\n host_config['ca_certs'] = conf.get('jackal', 'ca_certs')\n if int(conf.get('jackal', 'client_certs')):\n host_config['client_cert'] = conf.get('jackal', 'client_cert')\n host_config['client_key'] = conf.get('jackal', 'client_key')\n host_config['ssl_assert_hostname'] = False\n connections.create_connection(**host_config)", - "docstring": "Creates a connection based upon the given configuration object." 
- }, - { - "code": "def receive(self):\n try:\n buffer = self._socket.recv(BUFFER_SIZE)\n except socket.timeout as error:\n _LOGGER.error(\"Error receiving: %s\", error)\n return \"\"\n buffering = True\n response = ''\n while buffering:\n if '\\n' in buffer.decode(\"utf8\"):\n response = buffer.decode(\"utf8\").split('\\n')[0]\n buffering = False\n else:\n try:\n more = self._socket.recv(BUFFER_SIZE)\n except socket.timeout:\n more = None\n if not more:\n buffering = False\n response = buffer.decode(\"utf8\")\n else:\n buffer += more\n return response", - "docstring": "Receive TCP response, looping to get whole thing or timeout." - }, - { - "code": "def deploy_blog():\n logger.info(deploy_blog.__doc__)\n call(\n 'rsync -aqu ' + join(dirname(__file__), 'res', '*') + ' .',\n shell=True)\n logger.success('Done')\n logger.info('Please edit config.toml to meet your needs')", - "docstring": "Deploy new blog to current directory" - }, - { - "code": "def filesByExtension(fnames):\n byExt={\"abf\":[],\"jpg\":[],\"tif\":[]}\n for fname in fnames:\n ext = os.path.splitext(fname)[1].replace(\".\",'').lower()\n if not ext in byExt.keys():\n byExt[ext]=[]\n byExt[ext]=byExt[ext]+[fname]\n return byExt", - "docstring": "given a list of files, return a dict organized by extension." - }, - { - "code": "def _file(self, file):\n if not self.__text_is_expected:\n file = BytesWrapper(file, self.__encoding)\n self.__dump_to_file(file)", - "docstring": "Dump the content to a `file`." - }, - { - "code": "def disconnect(self):\n if not self.socket:\n logging.warning(\"No active socket to close!\")\n return\n self.socket.close()\n self.socket = None", - "docstring": "Ends our server tcp connection." - }, - { - "code": "def decode(self, ciphertext):\n \"Search for a decoding of the ciphertext.\"\n self.ciphertext = ciphertext\n problem = PermutationDecoderProblem(decoder=self)\n return search.best_first_tree_search(\n problem, lambda node: self.score(node.state))", - "docstring": "Search for a decoding of the ciphertext." - }, - { - "code": "def clear(self):\n dict.clear(self)\n self.nodes = []\n self.edges = []\n self.root = None\n self.layout.i = 0\n self.alpha = 0", - "docstring": "Remove nodes and edges and reset the layout." - }, - { - "code": "def _ymd2ord(year, month, day):\n \"year, month, day -> ordinal, considering 01-Jan-0001 as day 1.\"\n assert 1 <= month <= 12, 'month must be in 1..12'\n dim = _days_in_month(year, month)\n assert 1 <= day <= dim, ('day must be in 1..%d' % dim)\n return (_days_before_year(year) +\n _days_before_month(year, month) +\n day)", - "docstring": "year, month, day -> ordinal, considering 01-Jan-0001 as day 1." - }, - { - "code": "def describe_events(self, environment_name, next_token=None, start_time=None):\n events = self.ebs.describe_events(\n application_name=self.app_name,\n environment_name=environment_name,\n next_token=next_token,\n start_time=start_time + 'Z')\n return (events['DescribeEventsResponse']['DescribeEventsResult']['Events'], events['DescribeEventsResponse']['DescribeEventsResult']['NextToken'])", - "docstring": "Describes events from the given environment" - }, - { - "code": "def respawn(name=None, group=None):\n if name is None:\n name = get_name()\n delete(name=name, group=group)\n instance = get_or_create(name=name, group=group)\n env.host_string = instance.public_dns_name", - "docstring": "Deletes and recreates one or more VM instances." 
- }, - { - "code": "def delete(self):\n i = self.index()\n if i != None: del self.canvas.layers[i]", - "docstring": "Removes this layer from the canvas." - }, - { - "code": "def _translate_residue(self, selection, default_atomname='CA'):\n m = self.RESIDUE.match(selection)\n if not m:\n errmsg = \"Selection {selection!r} is not valid.\".format(**vars())\n logger.error(errmsg)\n raise ValueError(errmsg)\n gmx_resid = self.gmx_resid(int(m.group('resid')))\n residue = m.group('aa')\n if len(residue) == 1:\n gmx_resname = utilities.convert_aa_code(residue)\n else:\n gmx_resname = residue\n gmx_atomname = m.group('atom')\n if gmx_atomname is None:\n gmx_atomname = default_atomname\n return {'resname':gmx_resname, 'resid':gmx_resid, 'atomname':gmx_atomname}", - "docstring": "Translate selection for a single res to make_ndx syntax." - }, - { - "code": "def conference_list(self, call_params):\n path = '/' + self.api_version + '/ConferenceList/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Conference List Helper" - }, - { - "code": "def present(self, results):\n \"Present the results as a list.\"\n for (score, d) in results:\n doc = self.documents[d]\n print (\"%5.2f|%25s | %s\"\n % (100 * score, doc.url, doc.title[:45].expandtabs()))", - "docstring": "Present the results as a list." - }, - { - "code": "def package_for_editor_signavio(self, spec, filename):\n signavio_file = filename[:-len('.bpmn20.xml')] + '.signavio.xml'\n if os.path.exists(signavio_file):\n self.write_file_to_package_zip(\n \"src/\" + self._get_zip_path(signavio_file), signavio_file)\n f = open(signavio_file, 'r')\n try:\n signavio_tree = ET.parse(f)\n finally:\n f.close()\n svg_node = one(signavio_tree.findall('.//svg-representation'))\n self.write_to_package_zip(\"%s.svg\" % spec.name, svg_node.text)", - "docstring": "Adds the SVG files to the archive for this BPMN file." - }, - { - "code": "def swap_buffers(self):\r\n if not self.window.context:\r\n return\r\n self.frames += 1\r\n self.window.flip()\r\n self.window.dispatch_events()", - "docstring": "Swap buffers, increment frame counter and pull events" - }, - { - "code": "def fetch(self, key: object, default=None):\r\n return self._user_data.get(key, default)", - "docstring": "Retrieves the related value from the stored user data." - }, - { - "code": "def without_tz(request):\n t = Template('{% load tz %}{% get_current_timezone as TIME_ZONE %}{{ TIME_ZONE }}') \n c = RequestContext(request)\n response = t.render(c)\n return HttpResponse(response)", - "docstring": "Get the time without TZ enabled" - }, - { - "code": "def _get_Ks(self):\n \"Ks as an array and type-checked.\"\n Ks = as_integer_type(self.Ks)\n if Ks.ndim != 1:\n raise TypeError(\"Ks should be 1-dim, got shape {}\".format(Ks.shape))\n if Ks.min() < 1:\n raise ValueError(\"Ks should be positive; got {}\".format(Ks.min()))\n return Ks", - "docstring": "Ks as an array and type-checked." 
- }, - { - "code": "def setup():\n\t\tif not os.path.isdir(AtomShieldsScanner.CHECKERS_DIR):\n\t\t\tos.makedirs(AtomShieldsScanner.CHECKERS_DIR)\n\t\tif not os.path.isdir(AtomShieldsScanner.REPORTS_DIR):\n\t\t\tos.makedirs(AtomShieldsScanner.REPORTS_DIR)\n\t\tfor f in AtomShieldsScanner._getFiles(os.path.join(os.path.dirname(os.path.realpath(__file__)), \"checkers\"), \"*.py\"):\n\t\t\tAtomShieldsScanner.installChecker(f)\n\t\tfor f in AtomShieldsScanner._getFiles(os.path.join(os.path.dirname(os.path.realpath(__file__)), \"reports\"), \"*.py\"):\n\t\t\tAtomShieldsScanner.installReport(f)\n\t\tAtomShieldsScanner._executeMassiveMethod(path=AtomShieldsScanner.CHECKERS_DIR, method=\"install\", args={})\n\t\tconfig_dir = os.path.dirname(AtomShieldsScanner.CONFIG_PATH)\n\t\tif not os.path.isdir(config_dir):\n\t\t\tos.makedirs(config_dir)", - "docstring": "Creates required directories and copy checkers and reports." - }, - { - "code": "def load_resource_module(self):\n try:\n name = '{}.{}'.format(self.name, 'dependencies')\n self.dependencies_module = importlib.import_module(name)\n except ModuleNotFoundError as err:\n raise EffectError(\n (\n \"Effect package '{}' has no 'dependencies' module or the module has errors. \"\n \"Forwarded error from importlib: {}\"\n ).format(self.name, err))\n try:\n self.resources = getattr(self.dependencies_module, 'resources')\n except AttributeError:\n raise EffectError(\"Effect dependencies module '{}' has no 'resources' attribute\".format(name))\n if not isinstance(self.resources, list):\n raise EffectError(\n \"Effect dependencies module '{}': 'resources' is of type {} instead of a list\".format(\n name, type(self.resources)))\n try:\n self.effect_packages = getattr(self.dependencies_module, 'effect_packages')\n except AttributeError:\n raise EffectError(\"Effect dependencies module '{}' has 'effect_packages' attribute\".format(name))\n if not isinstance(self.effect_packages, list):\n raise EffectError(\n \"Effect dependencies module '{}': 'effect_packages' is of type {} instead of a list\".format(\n name, type(self.effects)))", - "docstring": "Fetch the resource list" - }, - { - "code": "def decode_html_entities(html):\n if not html:\n return html\n for entity, char in six.iteritems(html_entity_map):\n html = html.replace(entity, char)\n return html", - "docstring": "Decodes a limited set of HTML entities." - }, - { - "code": "def posarghelp(self, indent=0, maxindent=25, width=79):\n docs = []\n makelabel = lambda posarg: ' ' * indent + posarg.displayname + ': '\n helpindent = _autoindent([makelabel(p) for p in self.positional_args], indent, maxindent)\n for posarg in self.positional_args:\n label = makelabel(posarg)\n text = posarg.formatname + '. ' + posarg.docs\n wrapped = self._wrap_labelled(label, text, helpindent, width)\n docs.extend(wrapped)\n return '\\n'.join(docs)", - "docstring": "Return user friendly help on positional arguments in the program." - }, - { - "code": "def main_make_views(gtfs_fname):\n print(\"creating views\")\n conn = GTFS(fname_or_conn=gtfs_fname).conn\n for L in Loaders:\n L(None).make_views(conn)\n conn.commit()", - "docstring": "Re-create all views." 
- }, - { - "code": "def __load_jams_schema():\n schema_file = os.path.join(SCHEMA_DIR, 'jams_schema.json')\n jams_schema = None\n with open(resource_filename(__name__, schema_file), mode='r') as fdesc:\n jams_schema = json.load(fdesc)\n if jams_schema is None:\n raise JamsError('Unable to load JAMS schema')\n return jams_schema", - "docstring": "Load the schema file from the package." - }, - { - "code": "async def _queue(self, ctx, page: int = 1):\r\n player = self.bot.lavalink.players.get(ctx.guild.id)\r\n if not player.queue:\r\n return await ctx.send('There\\'s nothing in the queue! Why not queue something?')\r\n items_per_page = 10\r\n pages = math.ceil(len(player.queue) / items_per_page)\r\n start = (page - 1) * items_per_page\r\n end = start + items_per_page\r\n queue_list = ''\r\n for index, track in enumerate(player.queue[start:end], start=start):\r\n queue_list += f'`{index + 1}.` [**{track.title}**]({track.uri})\\n'\r\n embed = discord.Embed(colour=discord.Color.blurple(),\r\n description=f'**{len(player.queue)} tracks**\\n\\n{queue_list}')\r\n embed.set_footer(text=f'Viewing page {page}/{pages}')\r\n await ctx.send(embed=embed)", - "docstring": "Shows the player's queue." - }, - { - "code": "def upload_backend(index='dev', user=None):\n get_vars()\n use_devpi(index=index)\n with fab.lcd('../application'):\n fab.local('make upload')", - "docstring": "Build the backend and upload it to the remote server at the given index" - }, - { - "code": "def _sm_stop_from_pain(self, *args, **kwargs):\n _logger.info(\"Stopping chaos for blockade %s\" % self._blockade_name)\n self._do_reset_all()", - "docstring": "Stop chaos while there is a blockade event in progress" - }, - { - "code": "def complement(clr):\n clr = color(clr)\n colors = colorlist(clr)\n colors.append(clr.complement)\n return colors", - "docstring": "Returns the color and its complement in a list." - }, - { - "code": "def get(self, model):\n for tag in model.tags:\n if self.is_tag(tag):\n value = self.deserialize(tag)\n try:\n self.validate(value)\n return value\n except TagValidationError:\n continue\n return None", - "docstring": "Get a matching valid tag off the model." - }, - { - "code": "def _group_tasks_by_jobid(tasks):\n ret = collections.defaultdict(list)\n for t in tasks:\n ret[t.get_field('job-id')].append(t)\n return ret", - "docstring": "A defaultdict with, for each job, a list of its tasks." 
- }, - { - "code": "def render(self):\n template_name = '{0}.jinja2'.format(self.get_name())\n template = self.template_env.get_template(template_name)\n context = getattr(self.backend, 'intermediate_data', {})\n output = template.render(data=context)\n return self.cleanup(output)", - "docstring": "Renders configuration by using the jinja2 templating engine" - }, - { - "code": "def BYTE(self, offset, value):\n offset = Operators.ITEBV(256, offset < 32, (31 - offset) * 8, 256)\n return Operators.ZEXTEND(Operators.EXTRACT(value, offset, 8), 256)", - "docstring": "Retrieve single byte from word" - }, - { - "code": "def print_information(handler, label):\n click.echo('=> Latest stable: {tag}'.format(\n tag=click.style(str(handler.latest_stable or 'N/A'), fg='yellow' if\n handler.latest_stable else 'magenta')\n ))\n if label is not None:\n latest_revision = handler.latest_revision(label)\n click.echo('=> Latest relative revision ({label}): {tag}'.format(\n label=click.style(label, fg='blue'),\n tag=click.style(str(latest_revision or 'N/A'),\n fg='yellow' if latest_revision else 'magenta')\n ))", - "docstring": "Prints latest tag's information" - }, - { - "code": "def make_message_multipart(message):\n if not message.is_multipart():\n multipart_message = email.mime.multipart.MIMEMultipart('alternative')\n for header_key in set(message.keys()):\n values = message.get_all(header_key, failobj=[])\n for value in values:\n multipart_message[header_key] = value\n original_text = message.get_payload()\n multipart_message.attach(email.mime.text.MIMEText(original_text))\n message = multipart_message\n message = _create_boundary(message)\n return message", - "docstring": "Convert a message into a multipart message." - }, - { - "code": "def _dictfetchall(self, cursor):\n columns = [col[0] for col in cursor.description]\n return [\n dict(zip(columns, row))\n for row in cursor.fetchall()\n ]", - "docstring": "Return all rows from a cursor as a dict." - }, - { - "code": "def eof(fd):\n b = fd.read(1)\n end = len(b) == 0\n if not end:\n curpos = fd.tell()\n fd.seek(curpos - 1)\n return end", - "docstring": "Determine if end-of-file is reached for file fd." - }, - { - "code": "def remove(self):\n if self.disco is None:\n return\n self.xmlnode.unlinkNode()\n oldns=self.xmlnode.ns()\n ns=self.xmlnode.newNs(oldns.getContent(),None)\n self.xmlnode.replaceNs(oldns,ns)\n common_root.addChild(self.xmlnode())\n self.disco=None", - "docstring": "Remove `self` from the containing `DiscoItems` object." - }, - { - "code": "def show(dataset_uri):\n try:\n dataset = dtoolcore.ProtoDataSet.from_uri(\n uri=dataset_uri,\n config_path=CONFIG_PATH\n )\n except dtoolcore.DtoolCoreTypeError:\n dataset = dtoolcore.DataSet.from_uri(\n uri=dataset_uri,\n config_path=CONFIG_PATH\n )\n readme_content = dataset.get_readme_content()\n click.secho(readme_content)", - "docstring": "Show the descriptive metadata in the readme." 
- }, - { - "code": "def _get_base(server_certificate, **conn):\n server_certificate['_version'] = 1\n cert_details = get_server_certificate_api(server_certificate['ServerCertificateName'], **conn)\n if cert_details:\n server_certificate.update(cert_details['ServerCertificateMetadata'])\n server_certificate['CertificateBody'] = cert_details['CertificateBody']\n server_certificate['CertificateChain'] = cert_details.get('CertificateChain', None)\n server_certificate['UploadDate'] = get_iso_string(server_certificate['UploadDate'])\n server_certificate['Expiration'] = get_iso_string(server_certificate['Expiration'])\n return server_certificate", - "docstring": "Fetch the base IAM Server Certificate." - }, - { - "code": "def update(self):\n if self.delay > 0:\n self.delay -= 1; return\n if self.fi == 0:\n if len(self.q) == 1: \n self.fn = float(\"inf\")\n else:\n self.fn = len(self.q[self.i]) / self.speed\n self.fn = max(self.fn, self.mf) \n self.fi += 1\n if self.fi > self.fn:\n self.fi = 0\n self.i = (self.i+1) % len(self.q)", - "docstring": "Rotates the queued texts and determines display time." - }, - { - "code": "def _file_path(self, uid):\n file_name = '%s.doentry' % (uid)\n return os.path.join(self.dayone_journal_path, file_name)", - "docstring": "Create and return full file path for DayOne entry" - }, - { - "code": "def strip_fields(self):\n for tag in self.record.keys():\n if tag in self.fields_list:\n record_delete_fields(self.record, tag)", - "docstring": "Clear any fields listed in field_list." - }, - { - "code": "def transmit_learner_data(self, user):\n exporter = self.get_learner_data_exporter(user)\n transmitter = self.get_learner_data_transmitter()\n transmitter.transmit(exporter)", - "docstring": "Iterate over each learner data record and transmit it to the integrated channel." - }, - { - "code": "def _read_version(self, data):\n version = ord(data[0])\n if version not in self.VERSIONS:\n raise Exception('Version not defined: %d' % version)\n return version", - "docstring": "Read header version from data" - }, - { - "code": "def convert(self, schema_node, definition_handler):\n converted = {\n 'name': schema_node.name,\n 'in': self._in,\n 'required': schema_node.required\n }\n if schema_node.description:\n converted['description'] = schema_node.description\n if schema_node.default:\n converted['default'] = schema_node.default\n schema = definition_handler(schema_node)\n schema.pop('title', None)\n converted.update(schema)\n if schema.get('type') == 'array':\n converted['items'] = {'type': schema['items']['type']}\n return converted", - "docstring": "Convert node schema into a parameter object." - }, - { - "code": "def rem_active_module(module):\n modules = set(get_active_modules())\n modules.discard(module)\n new_modules_path = os.pathsep.join([m.path for m in modules])\n os.environ['CPENV_ACTIVE_MODULES'] = str(new_modules_path)", - "docstring": "Remove a module from CPENV_ACTIVE_MODULES environment variable" - }, - { - "code": "def all(self, list_id, **queryparams):\n return self._mc_client._get(url=self._build_path(list_id, 'segments'), **queryparams)", - "docstring": "returns the first 10 segments for a specific list." - }, - { - "code": "def concatclusts(outhandle, alignbits):\n with gzip.open(outhandle, 'wb') as out:\n for fname in alignbits:\n with open(fname) as infile:\n out.write(infile.read()+\"//\\n//\\n\")", - "docstring": "concatenates sorted aligned cluster tmpfiles and removes them." 
- }, - { - "code": "def record_conflict(self, assignment, var, val, delta):\n \"Record conflicts caused by addition or deletion of a Queen.\"\n n = len(self.vars)\n self.rows[val] += delta\n self.downs[var + val] += delta\n self.ups[var - val + n - 1] += delta", - "docstring": "Record conflicts caused by addition or deletion of a Queen." - }, - { - "code": "def EnsembleLearner(learners):\n def train(dataset):\n predictors = [learner(dataset) for learner in learners]\n def predict(example):\n return mode(predictor(example) for predictor in predictors)\n return predict\n return train", - "docstring": "Given a list of learning algorithms, have them vote." - }, - { - "code": "def bare(self):\n \"Make a Features object with no metadata; points to the same features.\"\n if not self.meta:\n return self\n elif self.stacked:\n return Features(self.stacked_features, self.n_pts, copy=False)\n else:\n return Features(self.features, copy=False)", - "docstring": "Make a Features object with no metadata; points to the same features." - }, - { - "code": "def price_unit(self):\n currency = self.currency\n consumption_unit = self.consumption_unit\n if not currency or not consumption_unit:\n _LOGGER.error(\"Could not find price_unit.\")\n return \" \"\n return currency + \"/\" + consumption_unit", - "docstring": "Return the price unit." - }, - { - "code": "def _flatten_tree(tree, old_path=None):\n flat_tree = []\n for key, value in tree.items():\n new_path = \"/\".join([old_path, key]) if old_path else key\n if isinstance(value, dict) and \"format\" not in value:\n flat_tree.extend(_flatten_tree(value, old_path=new_path))\n else:\n flat_tree.append((new_path, value))\n return flat_tree", - "docstring": "Flatten dict tree into dictionary where keys are paths of old dict." - }, - { - "code": "def scaled_limits(self):\n _min = self.limits[0]/self.factor\n _max = self.limits[1]/self.factor\n return _min, _max", - "docstring": "Minimum and Maximum to use for computing breaks" - }, - { - "code": "def write():\n click.echo(\"Fantastic. Let's get started. \")\n title = click.prompt(\"What's the title?\")\n url = slugify(title)\n url = click.prompt(\"What's the URL?\", default=url)\n click.echo(\"Got it. Creating %s...\" % url)\n scaffold_piece(title, url)", - "docstring": "Start a new piece" - }, - { - "code": "async def trigger(self, event, data=None, socket_id=None):\n json_data = json.dumps(data, cls=self.pusher.encoder)\n query_string = self.signed_query(event, json_data, socket_id)\n signed_path = \"%s?%s\" % (self.path, query_string)\n pusher = self.pusher\n absolute_url = pusher.get_absolute_path(signed_path)\n response = await pusher.http.post(\n absolute_url, data=json_data,\n headers=[('Content-Type', 'application/json')])\n response.raise_for_status()\n return response.status_code == 202", - "docstring": "Trigger an ``event`` on this channel" - }, - { - "code": "def add_tweets(self, url, last_modified, tweets):\n try:\n self.cache[url] = {\"last_modified\": last_modified, \"tweets\": tweets}\n self.mark_updated()\n return True\n except TypeError:\n return False", - "docstring": "Adds new tweets to the cache." - }, - { - "code": "def validate_sceneInfo(self):\n if self.sceneInfo.prefix not in self.__prefixesValid:\n raise WrongSceneNameError('AWS: Prefix of %s (%s) is invalid'\n % (self.sceneInfo.name, self.sceneInfo.prefix))", - "docstring": "Check whether sceneInfo is valid to download from AWS Storage." 
- }, - { - "code": "def getPredictionResults(network, clRegionName):\n classifierRegion = network.regions[clRegionName]\n actualValues = classifierRegion.getOutputData(\"actualValues\")\n probabilities = classifierRegion.getOutputData(\"probabilities\")\n steps = classifierRegion.getSelf().stepsList\n N = classifierRegion.getSelf().maxCategoryCount\n results = {step: {} for step in steps}\n for i in range(len(steps)):\n stepProbabilities = probabilities[i * N:(i + 1) * N - 1]\n mostLikelyCategoryIdx = stepProbabilities.argmax()\n predictedValue = actualValues[mostLikelyCategoryIdx]\n predictionConfidence = stepProbabilities[mostLikelyCategoryIdx]\n results[steps[i]][\"predictedValue\"] = predictedValue\n results[steps[i]][\"predictionConfidence\"] = predictionConfidence\n return results", - "docstring": "Get prediction results for all prediction steps." - }, - { - "code": "def write_bel_namespace_mappings(self, file: TextIO, **kwargs) -> None:\n json.dump(self._get_namespace_identifier_to_name(**kwargs), file, indent=2, sort_keys=True)", - "docstring": "Write a BEL namespace mapping file." - }, - { - "code": "def output_eol_literal_marker(self, m):\n marker = ':' if m.group(1) is None else ''\n return self.renderer.eol_literal_marker(marker)", - "docstring": "Pass through rest link." - }, - { - "code": "def keypress(self, size, key):\n key = super().keypress(size, key)\n num_tabs = len(self._widgets)\n if key == self._keys['prev_tab']:\n self._tab_index = (self._tab_index - 1) % num_tabs\n self._update_tabs()\n elif key == self._keys['next_tab']:\n self._tab_index = (self._tab_index + 1) % num_tabs\n self._update_tabs()\n elif key == self._keys['close_tab']:\n if self._tab_index > 0:\n curr_tab = self._widgets[self._tab_index]\n self._widgets.remove(curr_tab)\n del self._widget_title[curr_tab]\n self._tab_index -= 1\n self._update_tabs()\n else:\n return key", - "docstring": "Handle keypresses for changing tabs." - }, - { - "code": "def write_file(obj, path, oned_as='row', convert_to_float=True):\r\n data = _encode(obj, convert_to_float)\r\n try:\r\n with _WRITE_LOCK:\r\n savemat(path, data, appendmat=False, oned_as=oned_as,\r\n long_field_names=True)\r\n except KeyError:\n raise Exception('could not save mat file')", - "docstring": "Save a Python object to an Octave file on the given path." - }, - { - "code": "def parse(svg, cached=False, _copy=True):\n if not cached:\n dom = parser.parseString(svg)\n paths = parse_node(dom, [])\n else:\n id = _cache.id(svg)\n if not _cache.has_key(id):\n dom = parser.parseString(svg)\n _cache.save(id, parse_node(dom, []))\n paths = _cache.load(id, _copy)\n return paths", - "docstring": "Returns cached copies unless otherwise specified." - }, - { - "code": "def conference_mute(self, call_params):\n path = '/' + self.api_version + '/ConferenceMute/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Conference Mute helper" - }, - { - "code": "def schemaforms(self):\n _schemaforms = {\n k: v['schemaform']\n for k, v in self.app.config['DEPOSIT_RECORDS_UI_ENDPOINTS'].items()\n if 'schemaform' in v\n }\n return defaultdict(\n lambda: self.app.config['DEPOSIT_DEFAULT_SCHEMAFORM'], _schemaforms\n )", - "docstring": "Load deposit schema forms." 
- }, - { - "code": "def prepare(self):\n attributes, elements = OrderedDict(), []\n nsmap = dict([self.meta.namespace])\n for name, item in self._items.items():\n if isinstance(item, Attribute):\n attributes[name] = item.prepare(self)\n elif isinstance(item, Element):\n nsmap.update([item.namespace])\n elements.append(item)\n return attributes, elements, nsmap", - "docstring": "Prepare the date in the instance state for serialization." - }, - { - "code": "def put(self, coro):\n assert asyncio.iscoroutine(coro)\n self._queue.put_nowait(coro)", - "docstring": "Put a coroutine in the queue to be executed." - }, - { - "code": "def read_xl(xl_path: str):\n xl_path, choice = _check_xl_path(xl_path)\n reader = XL_READERS[choice]\n return reader(xl_path)", - "docstring": "Return the workbook from the Excel file in `xl_path`." - }, - { - "code": "def kernel_gaussian(self, sizeMS, sigmaMS=None, forwardOnly=False):\n sigmaMS=sizeMS/10 if sigmaMS is None else sigmaMS\n size,sigma=sizeMS*self.pointsPerMs,sigmaMS*self.pointsPerMs\n self.kernel=swhlab.common.kernel_gaussian(size,sigma,forwardOnly)\n return self.kernel", - "docstring": "create kernel based on this ABF info." - }, - { - "code": "def account_number():\n account = [random.randint(1, 9) for _ in range(20)]\n return \"\".join(map(str, account))", - "docstring": "Return a random bank account number." - }, - { - "code": "def _exception_handler(self, _loop, context):\n self._coroutine_queue.put(self._client.disconnect())\n default_exception = Exception(context.get('message'))\n self._exception = context.get('exception', default_exception)", - "docstring": "Handle exceptions from the asyncio loop." - }, - { - "code": "def AddFilters(self, filters):\n for filt in filters.split(','):\n clean_filt = filt.strip()\n if clean_filt:\n self.filters.append(clean_filt)\n for filt in self.filters:\n if not (filt.startswith('+') or filt.startswith('-')):\n raise ValueError('Every filter in --filters must start with + or -'\n ' (%s does not)' % filt)", - "docstring": "Adds more filters to the existing list of error-message filters." - }, - { - "code": "def reverse_id(self):\n return '_'.join((self.id, 'reverse',\n hashlib.md5(\n self.id.encode('utf-8')).hexdigest()[0:5]))", - "docstring": "Generate the id of reverse_variable from the reaction's id." - }, - { - "code": "def refresh_token(func):\n @wraps(func)\n def inner(self, *args, **kwargs):\n if self.token_expired():\n self.connect()\n return func(self, *args, **kwargs)\n return inner", - "docstring": "Use this method decorator to ensure the JWT token is refreshed when needed." - }, - { - "code": "def determine_type(x):\n types = (int, float, str)\n _type = filter(lambda a: is_type(a, x), types)[0]\n return _type(x)", - "docstring": "Determine the type of x" - }, - { - "code": "async def fire(self, *args, **kwargs):\n logger.debug('Fired {}'.format(self))\n for observer in self._observers:\n gen = observer(*args, **kwargs)\n if asyncio.iscoroutinefunction(observer):\n await gen", - "docstring": "Fire this event, calling all observers with the same arguments." 
- }, - { - "code": "async def connect(self):\n if not self._consumer:\n waiter = self._waiter = asyncio.Future()\n try:\n address = self._websocket_host()\n self.logger.info('Connect to %s', address)\n self._consumer = await self.http.get(address)\n if self._consumer.status_code != 101:\n raise PusherError(\"Could not connect to websocket\")\n except Exception as exc:\n waiter.set_exception(exc)\n raise\n else:\n await waiter\n return self._consumer", - "docstring": "Connect to a Pusher websocket" - }, - { - "code": "def wait_for_master_to_start(single_master):\n i = 0\n while True:\n try:\n r = requests.get(\"http://%s:4646/v1/status/leader\" % single_master)\n if r.status_code == 200:\n break\n except:\n Log.debug(sys.exc_info()[0])\n Log.info(\"Waiting for cluster to come up... %s\" % i)\n time.sleep(1)\n if i > 10:\n Log.error(\"Failed to start Nomad Cluster!\")\n sys.exit(-1)\n i = i + 1", - "docstring": "Wait for a nomad master to start" - }, - { - "code": "def summarize_stability(graph: BELGraph) -> Mapping[str, int]:\n regulatory_pairs = get_regulatory_pairs(graph)\n chaotic_pairs = get_chaotic_pairs(graph)\n dampened_pairs = get_dampened_pairs(graph)\n contraditory_pairs = get_contradiction_summary(graph)\n separately_unstable_triples = get_separate_unstable_correlation_triples(graph)\n mutually_unstable_triples = get_mutually_unstable_correlation_triples(graph)\n jens_unstable_triples = get_jens_unstable(graph)\n increase_mismatch_triples = get_increase_mismatch_triplets(graph)\n decrease_mismatch_triples = get_decrease_mismatch_triplets(graph)\n chaotic_triples = get_chaotic_triplets(graph)\n dampened_triples = get_dampened_triplets(graph)\n return {\n 'Regulatory Pairs': _count_or_len(regulatory_pairs),\n 'Chaotic Pairs': _count_or_len(chaotic_pairs),\n 'Dampened Pairs': _count_or_len(dampened_pairs),\n 'Contradictory Pairs': _count_or_len(contraditory_pairs),\n 'Separately Unstable Triples': _count_or_len(separately_unstable_triples),\n 'Mutually Unstable Triples': _count_or_len(mutually_unstable_triples),\n 'Jens Unstable Triples': _count_or_len(jens_unstable_triples),\n 'Increase Mismatch Triples': _count_or_len(increase_mismatch_triples),\n 'Decrease Mismatch Triples': _count_or_len(decrease_mismatch_triples),\n 'Chaotic Triples': _count_or_len(chaotic_triples),\n 'Dampened Triples': _count_or_len(dampened_triples)\n }", - "docstring": "Summarize the stability of the graph." - }, - { - "code": "def j2(x):\n to_return = 2./(x+1e-15)*j1(x) - j0(x)\n to_return[x==0] = 0\n return to_return", - "docstring": "A fast j2 defined in terms of other special functions" - }, - { - "code": "def remove_exited_dusty_containers():\n client = get_docker_client()\n exited_containers = get_exited_dusty_containers()\n removed_containers = []\n for container in exited_containers:\n log_to_client(\"Removing container {}\".format(container['Names'][0]))\n try:\n client.remove_container(container['Id'], v=True)\n removed_containers.append(container)\n except Exception as e:\n log_to_client(e.message or str(e))\n return removed_containers", - "docstring": "Removed all dusty containers with 'Exited' in their status" - }, - { - "code": "async def release_control(self):\n cmd = \"releasecontrol\"\n return await asyncio.wait_for(\n self._protocol.send_command(cmd), timeout=self._timeout\n )", - "docstring": "Release control of QTM." 
- }, - { - "code": "def all_subclasses(cls):\n for subclass in cls.__subclasses__():\n yield subclass\n for subc in all_subclasses(subclass):\n yield subc", - "docstring": "Recursively generate of all the subclasses of class cls." - }, - { - "code": "def cmd(command):\n env()\n ipmi = cij.env_to_dict(PREFIX, EXPORTED + REQUIRED)\n command = \"ipmitool -U %s -P %s -H %s -p %s %s\" % (\n ipmi[\"USER\"], ipmi[\"PASS\"], ipmi[\"HOST\"], ipmi[\"PORT\"], command)\n cij.info(\"ipmi.command: %s\" % command)\n return cij.util.execute(command, shell=True, echo=True)", - "docstring": "Send IPMI 'command' via ipmitool" - }, - { - "code": "def unhex(s):\n bits = 0\n for c in s:\n if '0' <= c <= '9':\n i = ord('0')\n elif 'a' <= c <= 'f':\n i = ord('a')-10\n elif 'A' <= c <= 'F':\n i = ord('A')-10\n else:\n break\n bits = bits*16 + (ord(c) - i)\n return bits", - "docstring": "Get the integer value of a hexadecimal number." - }, - { - "code": "def populate_subtasks(self, context, sg, parent_job_id):\n db_sg = db_api.security_group_find(context, id=sg, scope=db_api.ONE)\n if not db_sg:\n return None\n ports = db_api.sg_gather_associated_ports(context, db_sg)\n if len(ports) == 0:\n return {\"ports\": 0}\n for port in ports:\n job_body = dict(action=\"update port %s\" % port['id'],\n tenant_id=db_sg['tenant_id'],\n resource_id=port['id'],\n parent_id=parent_job_id)\n job_body = dict(job=job_body)\n job = job_api.create_job(context.elevated(), job_body)\n rpc_consumer = QuarkSGAsyncConsumerClient()\n try:\n rpc_consumer.update_port(context, port['id'], job['id'])\n except om_exc.MessagingTimeout:\n LOG.error(\"Failed to update port. Rabbit running?\")\n return None", - "docstring": "Produces a list of ports to be updated async." - }, - { - "code": "def mrv(assignment, csp):\n \"Minimum-remaining-values heuristic.\"\n return argmin_random_tie(\n [v for v in csp.vars if v not in assignment],\n lambda var: num_legal_values(csp, var, assignment))", - "docstring": "Minimum-remaining-values heuristic." - }, - { - "code": "def pre(self, command, output_dir, vars):\n vars['license_name'] = 'Apache'\n vars['year'] = time.strftime('%Y', time.localtime())", - "docstring": "Called before template is applied." - }, - { - "code": "def rgb_to_ansi256(r, g, b):\n if r == g and g == b:\n if r < 8:\n return 16\n if r > 248:\n return 231\n return round(((r - 8) / 247.0) * 24) + 232\n ansi_r = 36 * round(r / 255.0 * 5.0)\n ansi_g = 6 * round(g / 255.0 * 5.0)\n ansi_b = round(b / 255.0 * 5.0)\n ansi = 16 + ansi_r + ansi_g + ansi_b\n return ansi", - "docstring": "Convert RGB to ANSI 256 color" - }, - { - "code": "def cmpToDataStore_uri(base, ds1, ds2):\n ret = difflib.get_close_matches(base.uri, [ds1.uri, ds2.uri], 1, cutoff=0.5)\n if len(ret) <= 0:\n return 0\n if ret[0] == ds1.uri:\n return -1\n return 1", - "docstring": "Bases the comparison of the datastores on URI alone." - }, - { - "code": "def read_timeout_value_header(timeoutvalue):\n timeoutsecs = 0\n timeoutvaluelist = timeoutvalue.split(\",\")\n for timeoutspec in timeoutvaluelist:\n timeoutspec = timeoutspec.strip()\n if timeoutspec.lower() == \"infinite\":\n return -1\n else:\n listSR = reSecondsReader.findall(timeoutspec)\n for secs in listSR:\n timeoutsecs = int(secs)\n if timeoutsecs > MAX_FINITE_TIMEOUT_LIMIT:\n return -1\n if timeoutsecs != 0:\n return timeoutsecs\n return None", - "docstring": "Return -1 if infinite, else return numofsecs." 
- }, - { - "code": "def uninstall_blacklisted(self):\n from burlap.system import distrib_family\n blacklisted_packages = self.env.blacklisted_packages\n if not blacklisted_packages:\n print('No blacklisted packages.')\n return\n else:\n family = distrib_family()\n if family == DEBIAN:\n self.sudo('DEBIAN_FRONTEND=noninteractive apt-get -yq purge %s' % ' '.join(blacklisted_packages))\n else:\n raise NotImplementedError('Unknown family: %s' % family)", - "docstring": "Uninstalls all blacklisted packages." - }, - { - "code": "def localize_datetime(dt, tz_name='UTC'):\n tz_aware_dt = dt\n if dt.tzinfo is None:\n utc = pytz.timezone('UTC')\n aware = utc.localize(dt)\n timezone = pytz.timezone(tz_name)\n tz_aware_dt = aware.astimezone(timezone)\n else:\n logger.warn('tzinfo already set')\n return tz_aware_dt", - "docstring": "Provide a timzeone-aware object for a given datetime and timezone name" - }, - { - "code": "def _generate_key(pass_id, passphrases, salt, algorithm):\n if pass_id not in passphrases:\n raise Exception('Passphrase not defined for id: %d' % pass_id)\n passphrase = passphrases[pass_id]\n if len(passphrase) < 32:\n raise Exception('Passphrase less than 32 characters long')\n digestmod = EncryptedPickle._get_hashlib(algorithm['pbkdf2_algorithm'])\n encoder = PBKDF2(passphrase, salt,\n iterations=algorithm['pbkdf2_iterations'],\n digestmodule=digestmod)\n return encoder.read(algorithm['key_size'])", - "docstring": "Generate and return PBKDF2 key" - }, - { - "code": "def check_paypal_api_key(app_configs=None, **kwargs):\n\tmessages = []\n\tmode = getattr(djpaypal_settings, \"PAYPAL_MODE\", None)\n\tif mode not in VALID_MODES:\n\t\tmsg = \"Invalid PAYPAL_MODE specified: {}.\".format(repr(mode))\n\t\thint = \"PAYPAL_MODE must be one of {}\".format(\", \".join(repr(k) for k in VALID_MODES))\n\t\tmessages.append(checks.Critical(msg, hint=hint, id=\"djpaypal.C001\"))\n\tfor setting in \"PAYPAL_CLIENT_ID\", \"PAYPAL_CLIENT_SECRET\":\n\t\tif not getattr(djpaypal_settings, setting, None):\n\t\t\tmsg = \"Invalid value specified for {}\".format(setting)\n\t\t\thint = \"Add PAYPAL_CLIENT_ID and PAYPAL_CLIENT_SECRET to your settings.\"\n\t\t\tmessages.append(checks.Critical(msg, hint=hint, id=\"djpaypal.C002\"))\n\treturn messages", - "docstring": "Check that the Paypal API keys are configured correctly" - }, - { - "code": "def _finalize_profiles(self):\n for stop, stop_profile in self._stop_profiles.items():\n assert (isinstance(stop_profile, NodeProfileMultiObjective))\n neighbor_label_bags = []\n walk_durations_to_neighbors = []\n departure_arrival_stop_pairs = []\n if stop_profile.get_walk_to_target_duration() != 0 and stop in self._walk_network.node:\n neighbors = networkx.all_neighbors(self._walk_network, stop)\n for neighbor in neighbors:\n neighbor_profile = self._stop_profiles[neighbor]\n assert (isinstance(neighbor_profile, NodeProfileMultiObjective))\n neighbor_real_connection_labels = neighbor_profile.get_labels_for_real_connections()\n neighbor_label_bags.append(neighbor_real_connection_labels)\n walk_durations_to_neighbors.append(int(self._walk_network.get_edge_data(stop, neighbor)[\"d_walk\"] /\n self._walk_speed))\n departure_arrival_stop_pairs.append((stop, neighbor))\n stop_profile.finalize(neighbor_label_bags, walk_durations_to_neighbors, departure_arrival_stop_pairs)", - "docstring": "Deal with the first walks by joining profiles to other stops within walking distance." 
- }, - { - "code": "def setup_mpi_gpus():\n if 'CUDA_VISIBLE_DEVICES' not in os.environ:\n if sys.platform == 'darwin':\n ids = []\n else:\n lrank, _lsize = get_local_rank_size(MPI.COMM_WORLD)\n ids = [lrank]\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = \",\".join(map(str, ids))", - "docstring": "Set CUDA_VISIBLE_DEVICES to MPI rank if not already set" - }, - { - "code": "def add_tracker_url(parser):\n parser.add_argument(\n '--tracker_url',\n metavar='(tracker url; default: \"' + DEFAULT_TRACKER_URL + '\")',\n type=str, default=DEFAULT_TRACKER_URL)\n return parser", - "docstring": "add optional tracker_url argument" - }, - { - "code": "def wrap_deepmind(env, episode_life=True, clip_rewards=True, frame_stack=False, scale=False):\n if episode_life:\n env = EpisodicLifeEnv(env)\n if 'FIRE' in env.unwrapped.get_action_meanings():\n env = FireResetEnv(env)\n env = WarpFrame(env)\n if scale:\n env = ScaledFloatFrame(env)\n if clip_rewards:\n env = ClipRewardEnv(env)\n if frame_stack:\n env = FrameStack(env, 4)\n return env", - "docstring": "Configure environment for DeepMind-style Atari." - }, - { - "code": "def _distance_matrix(self, a, b):\n def sq(x): return (x * x)\n matrix = sq(a[:, 0][:, None] - b[:, 0][None, :])\n for x, y in zip(a.T[1:], b.T[1:]):\n matrix += sq(x[:, None] - y[None, :])\n return matrix", - "docstring": "Pairwise distance between each point in `a` and each point in `b`" - }, - { - "code": "def jobCancelAllRunningJobs(self):\n with ConnectionFactory.get() as conn:\n query = 'UPDATE %s SET cancel=TRUE WHERE status<>%%s ' \\\n % (self.jobsTableName,)\n conn.cursor.execute(query, [self.STATUS_COMPLETED])\n return", - "docstring": "Set cancel field of all currently-running jobs to true." - }, - { - "code": "def encode(cls, hex):\n out = []\n for i in range(len(hex) // 8):\n word = endian_swap(hex[8*i:8*i+8])\n x = int(word, 16)\n w1 = x % cls.n\n w2 = (x // cls.n + w1) % cls.n\n w3 = (x // cls.n // cls.n + w2) % cls.n\n out += [cls.word_list[w1], cls.word_list[w2], cls.word_list[w3]]\n checksum = cls.get_checksum(\" \".join(out))\n out.append(checksum)\n return \" \".join(out)", - "docstring": "Convert hexadecimal string to mnemonic word representation with checksum." - }, - { - "code": "def add_spout(self, name, spout_cls, par, config=None, optional_outputs=None):\n spout_spec = spout_cls.spec(name=name, par=par, config=config,\n optional_outputs=optional_outputs)\n self.add_spec(spout_spec)\n return spout_spec", - "docstring": "Add a spout to the topology" - }, - { - "code": "def index():\n page = request.args.get('page', 1, type=int)\n per_page = request.args.get('per_page', 5, type=int)\n q = request.args.get('q', '')\n groups = Group.query_by_user(current_user, eager=True)\n if q:\n groups = Group.search(groups, q)\n groups = groups.paginate(page, per_page=per_page)\n requests = Membership.query_requests(current_user).count()\n invitations = Membership.query_invitations(current_user).count()\n return render_template(\n 'invenio_groups/index.html',\n groups=groups,\n requests=requests,\n invitations=invitations,\n page=page,\n per_page=per_page,\n q=q\n )", - "docstring": "List all user memberships." 
- }, - { - "code": "def fresh_cookies(ctx, mold=''):\n mold = mold or \"https://github.com/Springerle/py-generic-project.git\"\n tmpdir = os.path.join(tempfile.gettempdir(), \"cc-upgrade-pygments-markdown-lexer\")\n if os.path.isdir('.git'):\n pass\n if os.path.isdir(tmpdir):\n shutil.rmtree(tmpdir)\n if os.path.exists(mold):\n shutil.copytree(mold, tmpdir, ignore=shutil.ignore_patterns(\n \".git\", \".svn\", \"*~\",\n ))\n else:\n ctx.run(\"git clone {} {}\".format(mold, tmpdir))\n shutil.copy2(\"project.d/cookiecutter.json\", tmpdir)\n with pushd('..'):\n ctx.run(\"cookiecutter --no-input {}\".format(tmpdir))\n if os.path.exists('.git'):\n ctx.run(\"git status\")", - "docstring": "Refresh the project from the original cookiecutter template." - }, - { - "code": "def edit(self, text, media=None, utc=None, now=None):\n url = PATHS['EDIT'] % self.id\n post_data = \"text=%s&\" % text\n if now:\n post_data += \"now=%s&\" % now\n if utc:\n post_data += \"utc=%s&\" % utc\n if media:\n media_format = \"media[%s]=%s&\"\n for media_type, media_item in media.iteritems():\n post_data += media_format % (media_type, media_item)\n response = self.api.post(url=url, data=post_data)\n return Update(api=self.api, raw_response=response['update'])", - "docstring": "Edit an existing, individual status update." - }, - { - "code": "def padnames(names):\n longname_len = max(len(i) for i in names)\n padding = 5\n pnames = [name + \" \" * (longname_len - len(name)+ padding) \\\n for name in names]\n snppad = \"//\" + \" \" * (longname_len - 2 + padding)\n return np.array(pnames), snppad", - "docstring": "pads names for loci output" - }, - { - "code": "def _collapse_variants_by_function(graph: BELGraph, func: str) -> None:\n for parent_node, variant_node, data in graph.edges(data=True):\n if data[RELATION] == HAS_VARIANT and parent_node.function == func:\n collapse_pair(graph, from_node=variant_node, to_node=parent_node)", - "docstring": "Collapse all of the given functions' variants' edges to their parents, in-place." - }, - { - "code": "def RETURN(self, offset, size):\n data = self.read_buffer(offset, size)\n raise EndTx('RETURN', data)", - "docstring": "Halt execution returning output data" - }, - { - "code": "def _extract(self, in_tile=None, in_data=None, out_tile=None):\n return self.config.output.extract_subset(\n input_data_tiles=[(in_tile, in_data)],\n out_tile=out_tile\n )", - "docstring": "Extract data from tile." - }, - { - "code": "def signature_matches(func, args=(), kwargs={}):\n try:\n sig = inspect.signature(func)\n sig.bind(*args, **kwargs)\n except TypeError:\n return False\n else:\n return True", - "docstring": "Work out if a function is callable with some args or not." - }, - { - "code": "def clear(self):\n self._Memory = None\n self._numPatterns = 0\n self._M = None\n self._categoryList = []\n self._partitionIdList = []\n self._partitionIdMap = {}\n self._finishedLearning = False\n self._iterationIdx = -1\n if self.maxStoredPatterns > 0:\n assert self.useSparseMemory, (\"Fixed capacity KNN is implemented only \"\n \"in the sparse memory mode\")\n self.fixedCapacity = True\n self._categoryRecencyList = []\n else:\n self.fixedCapacity = False\n self._protoSizes = None\n self._s = None\n self._vt = None\n self._nc = None\n self._mean = None\n self._specificIndexTraining = False\n self._nextTrainingIndices = None", - "docstring": "Clears the state of the KNNClassifier." 
- }, - { - "code": "def from_single(cls, meta: ProgramDescription, source: str):\r\n instance = cls(meta)\r\n instance.vertex_source = ShaderSource(\r\n VERTEX_SHADER,\r\n meta.path or meta.vertex_shader,\r\n source\r\n )\r\n if GEOMETRY_SHADER in source:\r\n instance.geometry_source = ShaderSource(\r\n GEOMETRY_SHADER,\r\n meta.path or meta.geometry_shader,\r\n source,\r\n )\r\n if FRAGMENT_SHADER in source:\r\n instance.fragment_source = ShaderSource(\r\n FRAGMENT_SHADER,\r\n meta.path or meta.fragment_shader,\r\n source,\r\n )\r\n if TESS_CONTROL_SHADER in source:\r\n instance.tess_control_source = ShaderSource(\r\n TESS_CONTROL_SHADER,\r\n meta.path or meta.tess_control_shader,\r\n source,\r\n )\r\n if TESS_EVALUATION_SHADER in source:\r\n instance.tess_evaluation_source = ShaderSource(\r\n TESS_EVALUATION_SHADER,\r\n meta.path or meta.tess_evaluation_shader,\r\n source,\r\n )\r\n return instance", - "docstring": "Initialize a single glsl string containing all shaders" - }, - { - "code": "def config_logging(debug):\n if debug:\n logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(message)s')\n logging.debug(\"Debug mode activated\")\n else:\n logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')", - "docstring": "Config logging level output output" - }, - { - "code": "def filter_by(zips=_zips, **kwargs):\n return [z for z in zips if all([k in z and z[k] == v for k, v in kwargs.items()])]", - "docstring": "Use `kwargs` to select for desired attributes from list of zipcode dicts" - }, - { - "code": "def remove_direct_link_triples(train, valid, test):\n pairs = set()\n merged = valid + test\n for t in merged:\n pairs.add((t.head, t.tail))\n filtered = filterfalse(lambda t: (t.head, t.tail) in pairs or (t.tail, t.head) in pairs, train)\n return list(filtered)", - "docstring": "Remove direct links in the training sets." - }, - { - "code": "def _send_stream_start(self, stream_id = None, stream_to = None):\n if self._output_state in (\"open\", \"closed\"):\n raise StreamError(\"Stream start already sent\")\n if not self.language:\n self.language = self.settings[\"language\"]\n if stream_to:\n stream_to = unicode(stream_to)\n elif self.peer and self.initiator:\n stream_to = unicode(self.peer)\n stream_from = None\n if self.me and (self.tls_established or not self.initiator):\n stream_from = unicode(self.me)\n if stream_id:\n self.stream_id = stream_id\n else:\n self.stream_id = None\n self.transport.send_stream_head(self.stanza_namespace,\n stream_from, stream_to,\n self.stream_id, language = self.language)\n self._output_state = \"open\"", - "docstring": "Send stream start tag." - }, - { - "code": "def blacklist(self, account):\n assert callable(self.blockchain.account_whitelist)\n return self.blockchain.account_whitelist(account, lists=[\"black\"], account=self)", - "docstring": "Add an other account to the blacklist of this account" - }, - { - "code": "async def query_presence(self, query_presence_request):\n response = hangouts_pb2.QueryPresenceResponse()\n await self._pb_request('presence/querypresence',\n query_presence_request, response)\n return response", - "docstring": "Return presence status for a list of users." 
- }, - { - "code": "def _product_file_hash(self, product=None):\n if self.hasher is None:\n return None\n else:\n products = self._rectify_products(product)\n product_file_hash = [\n util_hash.hash_file(p, hasher=self.hasher, base='hex')\n for p in products\n ]\n return product_file_hash", - "docstring": "Get the hash of the each product file" - }, - { - "code": "def backup(self, path):\n log.info('Backing up in '+path)\n files = self.file_list()\n self.prepare()\n for f in files:\n self.read_file(f[0], os.path.join(path, f[0]))", - "docstring": "Backup all files from the device" - }, - { - "code": "def _multiple_self_ref_fk_check(class_model):\n self_fk = []\n for f in class_model._meta.concrete_fields:\n if f.related_model in self_fk:\n return True\n if f.related_model == class_model:\n self_fk.append(class_model)\n return False", - "docstring": "We check whether a class has more than 1 FK reference to itself." - }, - { - "code": "def encodeValue(self, value, toBeAdded=True):\n encodedValue = np.array(self.encoder.encode(value), dtype=realDType)\n if toBeAdded:\n self.encodings.append(encodedValue)\n self.numEncodings+=1\n return encodedValue", - "docstring": "Value is encoded as a sdr using the encoding parameters of the Field" - }, - { - "code": "def _format_id(self, payload):\n if 'id' in payload:\n return str(payload['id'])\n if 'results' in payload:\n return ' '.join([six.text_type(item['id']) for item in payload['results']])\n raise MultipleRelatedError('Could not serialize output with id format.')", - "docstring": "Echos only the id" - }, - { - "code": "def station_selection_menu(self, error=None):\n self.screen.clear()\n if error:\n self.screen.print_error(\"{}\\n\".format(error))\n for i, station in enumerate(self.stations):\n i = \"{:>3}\".format(i)\n print(\"{}: {}\".format(Colors.yellow(i), station.name))\n return self.stations[self.screen.get_integer(\"Station: \")]", - "docstring": "Format a station menu and make the user select a station" - }, - { - "code": "def version():\n click.echo('Tower CLI %s' % __version__)\n click.echo('API %s' % CUR_API_VERSION)\n try:\n r = client.get('/config/')\n except RequestException as ex:\n raise exc.TowerCLIError('Could not connect to Ansible Tower.\\n%s' %\n six.text_type(ex))\n config = r.json()\n license = config.get('license_info', {}).get('license_type', 'open')\n if license == 'open':\n server_type = 'AWX'\n else:\n server_type = 'Ansible Tower'\n click.echo('%s %s' % (server_type, config['version']))\n click.echo('Ansible %s' % config['ansible_version'])", - "docstring": "Display full version information." - }, - { - "code": "def use_parser(self,parsername):\n self.__parser = self.parsers[parsername]\n self.__parser()", - "docstring": "Set parsername as the current parser and apply it." - }, - { - "code": "def shoebot_example(**shoebot_kwargs):\n def decorator(f):\n def run():\n from shoebot import ShoebotInstallError\n print(\" Shoebot - %s:\" % f.__name__.replace(\"_\", \" \"))\n try:\n import shoebot\n outputfile = \"/tmp/shoebot-%s.png\" % f.__name__\n bot = shoebot.create_bot(outputfile=outputfile)\n f(bot)\n bot.finish()\n print(' [passed] : %s' % outputfile)\n print('')\n except ShoebotInstallError as e:\n print(' [failed]', e.args[0])\n print('')\n except Exception:\n print(' [failed] - traceback:')\n for line in traceback.format_exc().splitlines():\n print(' %s' % line)\n print('')\n return run\n return decorator", - "docstring": "Decorator to run some code in a bot instance." 
- }, - { - "code": "def _process_facet_terms(facet_terms):\n elastic_facets = {}\n for facet in facet_terms:\n facet_term = {\"field\": facet}\n if facet_terms[facet]:\n for facet_option in facet_terms[facet]:\n facet_term[facet_option] = facet_terms[facet][facet_option]\n elastic_facets[facet] = {\n \"terms\": facet_term\n }\n return elastic_facets", - "docstring": "We have a list of terms with which we return facets" - }, - { - "code": "def make_aware(dt):\n return dt if dt.tzinfo else dt.replace(tzinfo=timezone.utc)", - "docstring": "Appends tzinfo and assumes UTC, if datetime object has no tzinfo already." - }, - { - "code": "def daily_at(cls, at, target):\n daily = datetime.timedelta(days=1)\n when = datetime.datetime.combine(datetime.date.today(), at)\n if when < now():\n when += daily\n return cls.at_time(cls._localize(when), daily, target)", - "docstring": "Schedule a command to run at a specific time each day." - }, - { - "code": "def search_blogs(q, start=0, wait=10, asynchronous=False, cached=False):\n service = GOOGLE_BLOGS\n return GoogleSearch(q, start, service, \"\", wait, asynchronous, cached)", - "docstring": "Returns a Google blogs query formatted as a GoogleSearch list object." - }, - { - "code": "def generate_hatpi_binnedlc_pkl(binnedpklf, textlcf, timebinsec,\n outfile=None):\n binlcdict = read_hatpi_binnedlc(binnedpklf, textlcf, timebinsec)\n if binlcdict:\n if outfile is None:\n outfile = os.path.join(\n os.path.dirname(binnedpklf),\n '%s-hplc.pkl' % (\n os.path.basename(binnedpklf).replace('sec-lc.pkl.gz','')\n )\n )\n return lcdict_to_pickle(binlcdict, outfile=outfile)\n else:\n LOGERROR('could not read binned HATPI LC: %s' % binnedpklf)\n return None", - "docstring": "This reads the binned LC and writes it out to a pickle." - }, - { - "code": "def parse_single_computer(entry):\n computer = Computer(dns_hostname=get_field(entry, 'dNSHostName'), description=get_field(\n entry, 'description'), os=get_field(entry, 'operatingSystem'), group_id=get_field(entry, 'primaryGroupID'))\n try:\n ip = str(ipaddress.ip_address(get_field(entry, 'IPv4')))\n except ValueError:\n ip = ''\n if ip:\n computer.ip = ip\n elif computer.dns_hostname:\n computer.ip = resolve_ip(computer.dns_hostname)\n return computer", - "docstring": "Parse the entry into a computer object." - }, - { - "code": "def search(self, number=None, *args, **kwargs):\n search = self.create_search(*args, **kwargs)\n try:\n if number:\n response = search[0:number]\n else:\n args, _ = self.core_parser.parse_known_args()\n if args.number:\n response = search[0:args.number]\n else:\n response = search.scan()\n return [hit for hit in response]\n except NotFoundError:\n print_error(\"The index was not found, have you initialized the index?\")\n return []\n except (ConnectionError, TransportError):\n print_error(\"Cannot connect to elasticsearch\")\n return []", - "docstring": "Searches the elasticsearch instance to retrieve the requested documents." - }, - { - "code": "def _get_enterprise_customer_users_batch(self, start, end):\n LOGGER.info('Fetching new batch of enterprise customer users from indexes: %s to %s', start, end)\n return User.objects.filter(pk__in=self._get_enterprise_customer_user_ids())[start:end]", - "docstring": "Returns a batched queryset of EnterpriseCustomerUser objects." 
- }, - { - "code": "def _apply_record_length_checks(self, i, r, summarize=False, context=None):\n for code, message, modulus in self._record_length_checks:\n if i % modulus == 0:\n if len(r) != len(self._field_names):\n p = {'code': code}\n if not summarize:\n p['message'] = message\n p['row'] = i + 1\n p['record'] = r\n p['length'] = len(r)\n if context is not None: p['context'] = context\n yield p", - "docstring": "Apply record length checks on the given record `r`." - }, - { - "code": "def __get_dash_menu(self, kibiter_major):\n omenu = []\n omenu.append(self.menu_panels_common['Overview'])\n ds_menu = self.__get_menu_entries(kibiter_major)\n kafka_menu = None\n community_menu = None\n found_kafka = [pos for pos, menu in enumerate(ds_menu) if menu['name'] == KAFKA_NAME]\n if found_kafka:\n kafka_menu = ds_menu.pop(found_kafka[0])\n found_community = [pos for pos, menu in enumerate(ds_menu) if menu['name'] == COMMUNITY_NAME]\n if found_community:\n community_menu = ds_menu.pop(found_community[0])\n ds_menu.sort(key=operator.itemgetter('name'))\n omenu += ds_menu\n if kafka_menu:\n omenu.append(kafka_menu)\n if community_menu:\n omenu.append(community_menu)\n omenu.append(self.menu_panels_common['Data Status'])\n omenu.append(self.menu_panels_common['About'])\n logger.debug(\"Menu for panels: %s\", json.dumps(ds_menu, indent=4))\n return omenu", - "docstring": "Order the dashboard menu" - }, - { - "code": "def function_selector(method_name_and_signature):\n s = sha3.keccak_256()\n s.update(method_name_and_signature.encode())\n return bytes(s.digest()[:4])", - "docstring": "Makes a function hash id from a method signature" - }, - { - "code": "def resolution_millis(self):\n if self.resolution is None or not isinstance(self.resolution, basestring):\n return self.resolution\n val, mult = self.resolution.split(' ')\n return int(float(val) * self._multipier(mult) * 1000)", - "docstring": "if set, get the value of resolution in milliseconds" - }, - { - "code": "def create_body_index(xml_string):\n xml = ET.fromstring(xml_string)\n body_to_index = {}\n for index, body in enumerate(xml.findall(\"*/Body/Name\")):\n body_to_index[body.text.strip()] = index\n return body_to_index", - "docstring": "Extract a name to index dictionary from 6dof settings xml" - }, - { - "code": "def ss_wrap(func):\n def wrapper(self, *args, **kwargs):\n if not self.savedsearch:\n self.savedsearch = SavedSearch(self)\n return func(self, *args, **kwargs)\n return wrapper", - "docstring": "ensure that a SavedSearch object exists" - }, - { - "code": "def print_file_info():\n tpl = TableLogger(columns='file,created,modified,size')\n for f in os.listdir('.'):\n size = os.stat(f).st_size\n date_created = datetime.fromtimestamp(os.path.getctime(f))\n date_modified = datetime.fromtimestamp(os.path.getmtime(f))\n tpl(f, date_created, date_modified, size)", - "docstring": "Prints file details in the current directory" - }, - { - "code": "def bik():\n return '04' + \\\n ''.join([str(random.randint(1, 9)) for _ in range(5)]) + \\\n str(random.randint(0, 49) + 50)", - "docstring": "Return a random bank identification number." 
- }, - { - "code": "def __intermediate_dns_servers(self, uci, address):\n if 'dns' in uci:\n return uci['dns']\n if address['proto'] in ['dhcp', 'dhcpv6', 'none']:\n return None\n dns = self.netjson.get('dns_servers', None)\n if dns:\n return ' '.join(dns)", - "docstring": "determines UCI interface \"dns\" option" - }, - { - "code": "def sample_cleanup(data, sample):\n umap1file = os.path.join(data.dirs.edits, sample.name+\"-tmp-umap1.fastq\")\n umap2file = os.path.join(data.dirs.edits, sample.name+\"-tmp-umap2.fastq\")\n unmapped = os.path.join(data.dirs.refmapping, sample.name+\"-unmapped.bam\")\n samplesam = os.path.join(data.dirs.refmapping, sample.name+\".sam\")\n split1 = os.path.join(data.dirs.edits, sample.name+\"-split1.fastq\")\n split2 = os.path.join(data.dirs.edits, sample.name+\"-split2.fastq\")\n refmap_derep = os.path.join(data.dirs.edits, sample.name+\"-refmap_derep.fastq\")\n for f in [umap1file, umap2file, unmapped, samplesam, split1, split2, refmap_derep]:\n try:\n os.remove(f)\n except:\n pass", - "docstring": "Clean up a bunch of loose files." - }, - { - "code": "def handle_int(self, item):\n doc = yield from self.handle_get(item)\n if doc is None:\n return None\n return int(doc.value.u8.text) or None", - "docstring": "Helper method for fetching a integer value." - }, - { - "code": "def run(self, x, y, lr=0.01, train_epochs=1000, test_epochs=1000, idx=0, verbose=None, **kwargs):\n verbose = SETTINGS.get_default(verbose=verbose)\n optim = th.optim.Adam(self.parameters(), lr=lr)\n running_loss = 0\n teloss = 0\n for i in range(train_epochs + test_epochs):\n optim.zero_grad()\n pred = self.forward(x)\n loss = self.criterion(pred, y)\n running_loss += loss.item()\n if i < train_epochs:\n loss.backward()\n optim.step()\n else:\n teloss += running_loss\n if verbose and not i % 300:\n print('Idx:{}; epoch:{}; score:{}'.\n format(idx, i, running_loss/300))\n running_loss = 0.0\n return teloss / test_epochs", - "docstring": "Run the GNN on a pair x,y of FloatTensor data." - }, - { - "code": "def p_file_contributor(self, f_term, predicate):\n for _, _, contributor in self.graph.triples((f_term, predicate, None)):\n self.builder.add_file_contribution(self.doc, six.text_type(contributor))", - "docstring": "Parse all file contributors and adds them to the model." - }, - { - "code": "def log(self):\n from heronpy.streamlet.impl.logbolt import LogStreamlet\n log_streamlet = LogStreamlet(self)\n self._add_child(log_streamlet)\n return", - "docstring": "Logs all elements of this streamlet. This returns nothing" - }, - { - "code": "def copy(src, dst, symlink=False, rellink=False):\n func = os.symlink if symlink else shutil.copy2\n if symlink and os.path.lexists(dst):\n os.remove(dst)\n if rellink:\n func(os.path.relpath(src, os.path.dirname(dst)), dst)\n else:\n func(src, dst)", - "docstring": "Copy or symlink the file." - }, - { - "code": "def init_async(self, loop=None):\n self._loop = loop or asyncio.get_event_loop()\n self._async_lock = asyncio.Lock(loop=loop)\n if not self.database == ':memory:':\n self._state = ConnectionLocal()", - "docstring": "Use when application is starting." - }, - { - "code": "def _create_deployment(self, deployment):\n api_response = self.kube_client.create_namespaced_deployment(\n body=deployment,\n namespace=self.namespace)\n logger.debug(\"Deployment created. 
status='{0}'\".format(str(api_response.status)))", - "docstring": "Create the kubernetes deployment" - }, - { - "code": "def _localize_inputs_command(self, task_dir, inputs, user_project):\n commands = []\n for i in inputs:\n if i.recursive or not i.value:\n continue\n source_file_path = i.uri\n local_file_path = task_dir + '/' + _DATA_SUBDIR + '/' + i.docker_path\n dest_file_path = self._get_input_target_path(local_file_path)\n commands.append('mkdir -p \"%s\"' % os.path.dirname(local_file_path))\n if i.file_provider in [job_model.P_LOCAL, job_model.P_GCS]:\n if user_project:\n command = 'gsutil -u %s -mq cp \"%s\" \"%s\"' % (\n user_project, source_file_path, dest_file_path)\n else:\n command = 'gsutil -mq cp \"%s\" \"%s\"' % (source_file_path,\n dest_file_path)\n commands.append(command)\n return '\\n'.join(commands)", - "docstring": "Returns a command that will stage inputs." - }, - { - "code": "def _add_conversation(self, conversation, events=[],\n event_cont_token=None):\n conv_id = conversation.conversation_id.id\n logger.debug('Adding new conversation: {}'.format(conv_id))\n conv = Conversation(self._client, self._user_list, conversation,\n events, event_cont_token)\n self._conv_dict[conv_id] = conv\n return conv", - "docstring": "Add new conversation from hangouts_pb2.Conversation" - }, - { - "code": "def find_working_password(self, usernames=None, host_strings=None):\n r = self.local_renderer\n if host_strings is None:\n host_strings = []\n if not host_strings:\n host_strings.append(self.genv.host_string)\n if usernames is None:\n usernames = []\n if not usernames:\n usernames.append(self.genv.user)\n for host_string in host_strings:\n for username in usernames:\n passwords = []\n passwords.append(self.genv.user_default_passwords[username])\n passwords.append(self.genv.user_passwords[username])\n passwords.append(self.env.default_password)\n for password in passwords:\n with settings(warn_only=True):\n r.env.host_string = host_string\n r.env.password = password\n r.env.user = username\n ret = r._local(\"sshpass -p '{password}' ssh -o StrictHostKeyChecking=no {user}@{host_string} echo hello\", capture=True)\n if ret.return_code in (1, 6) or 'hello' in ret:\n return host_string, username, password\n raise Exception('No working login found.')", - "docstring": "Returns the first working combination of username and password for the current host." - }, - { - "code": "def nature_cnn(unscaled_images, **conv_kwargs):\n scaled_images = tf.cast(unscaled_images, tf.float32) / 255.\n activ = tf.nn.relu\n h = activ(conv(scaled_images, 'c1', nf=32, rf=8, stride=4, init_scale=np.sqrt(2),\n **conv_kwargs))\n h2 = activ(conv(h, 'c2', nf=64, rf=4, stride=2, init_scale=np.sqrt(2), **conv_kwargs))\n h3 = activ(conv(h2, 'c3', nf=64, rf=3, stride=1, init_scale=np.sqrt(2), **conv_kwargs))\n h3 = conv_to_fc(h3)\n return activ(fc(h3, 'fc1', nh=512, init_scale=np.sqrt(2)))", - "docstring": "CNN from Nature paper." - }, - { - "code": "def str_from_text(text):\n REGEX = re.compile('((.|\\n)+)', re.UNICODE)\n match = REGEX.match(text)\n if match:\n return match.group(1)\n else:\n return None", - "docstring": "Return content of a free form text block as a string." 
- }, - { - "code": "def run_matcher(self, subject, *expected, **kw):\n self.expected = expected\n _args = (subject,)\n if self.kind == OperatorTypes.MATCHER:\n _args += expected\n try:\n result = self.match(*_args, **kw)\n except Exception as error:\n return self._make_error(error=error)\n reasons = []\n if isinstance(result, tuple):\n result, reasons = result\n if result is False and self.ctx.negate:\n return True\n if result is True and not self.ctx.negate:\n return True\n return self._make_error(reasons=reasons)", - "docstring": "Runs the operator matcher test function." - }, - { - "code": "def _insert_to_array(self, start, results):\n qrts, wgts, qsts = results\n with h5py.File(self.database.output, 'r+') as out:\n chunk = self._chunksize\n out['quartets'][start:start+chunk] = qrts\n if self.checkpoint.boots:\n key = \"qboots/b{}\".format(self.checkpoint.boots-1)\n out[key][start:start+chunk] = qsts\n else:\n out[\"qstats\"][start:start+chunk] = qsts", - "docstring": "inputs results from workers into hdf4 array" - }, - { - "code": "def create(cls, group, user, state=MembershipState.ACTIVE):\n with db.session.begin_nested():\n membership = cls(\n user_id=user.get_id(),\n id_group=group.id,\n state=state,\n )\n db.session.add(membership)\n return membership", - "docstring": "Create a new membership." - }, - { - "code": "def originFormat(thing):\n if type(thing) is list and type(thing[0]) is dict:\n return originFormat_listOfDicts(thing)\n if type(thing) is list and type(thing[0]) is list:\n return originFormat_listOfDicts(dictFlat(thing))\n else:\n print(\" !! I don't know how to format this object!\")\n print(thing)", - "docstring": "Try to format anything as a 2D matrix with column names." - }, - { - "code": "def fetch_tags_dates(self):\n if self.options.verbose:\n print(\n \"Fetching dates for {} tags...\".format(len(self.filtered_tags))\n )\n def worker(tag):\n self.get_time_of_tag(tag)\n threads = []\n max_threads = 50\n cnt = len(self.filtered_tags)\n for i in range(0, (cnt // max_threads) + 1):\n for j in range(max_threads):\n idx = i * 50 + j\n if idx == cnt:\n break\n t = threading.Thread(target=worker,\n args=(self.filtered_tags[idx],))\n threads.append(t)\n t.start()\n if self.options.verbose > 2:\n print(\".\", end=\"\")\n for t in threads:\n t.join()\n if self.options.verbose > 2:\n print(\".\")\n if self.options.verbose > 1:\n print(\"Fetched dates for {} tags.\".format(\n len(self.tag_times_dict))\n )", - "docstring": "Async fetching of all tags dates." - }, - { - "code": "def _add_notify(self, task_spec):\n if task_spec.name in self.task_specs:\n raise KeyError('Duplicate task spec name: ' + task_spec.name)\n self.task_specs[task_spec.name] = task_spec\n task_spec.id = len(self.task_specs)", - "docstring": "Called by a task spec when it was added into the workflow." - }, - { - "code": "def clubConsumables(self, fast=False):\n method = 'GET'\n url = 'club/consumables/development'\n rc = self.__request__(method, url)\n events = [self.pin.event('page_view', 'Hub - Club')]\n self.pin.send(events, fast=fast)\n events = [self.pin.event('page_view', 'Club - Consumables')]\n self.pin.send(events, fast=fast)\n events = [self.pin.event('page_view', 'Club - Consumables - List View')]\n self.pin.send(events, fast=fast)\n return [itemParse(i) for i in rc.get('itemData', ())]", - "docstring": "Return all consumables from club." 
- }, - { - "code": "def show_summary(self):\n self.get_instance_state()\n status_string = \"EC2 Summary:\\n\\tVPC IDs: {}\\n\\tSubnet IDs: \\\n{}\\n\\tSecurity Group ID: {}\\n\\tRunning Instance IDs: {}\\n\".format(\n self.vpc_id, self.sn_ids, self.sg_id, self.instances\n )\n status_string += \"\\tInstance States:\\n\\t\\t\"\n self.get_instance_state()\n for state in self.instance_states.keys():\n status_string += \"Instance ID: {} State: {}\\n\\t\\t\".format(\n state, self.instance_states[state]\n )\n status_string += \"\\n\"\n logger.info(status_string)\n return status_string", - "docstring": "Print human readable summary of current AWS state to log and to console." - }, - { - "code": "def _validate_ram(ram_in_mb):\n return int(GoogleV2CustomMachine._MEMORY_MULTIPLE * math.ceil(\n ram_in_mb / GoogleV2CustomMachine._MEMORY_MULTIPLE))", - "docstring": "Rounds ram up to the nearest multiple of _MEMORY_MULTIPLE." - }, - { - "code": "def csi_wrap(self, value, capname, *args):\n if isinstance(value, str):\n value = value.encode('utf-8')\n return b''.join([\n self.csi(capname, *args),\n value,\n self.csi('sgr0'),\n ])", - "docstring": "Return a value wrapped in the selected CSI and does a reset." - }, - { - "code": "def allowed_operations(self):\n if self.slug is not None:\n return self.meta.detail_allowed_operations\n return self.meta.list_allowed_operations", - "docstring": "Retrieves the allowed operations for this request." - }, - { - "code": "def item_fields(self):\n if self.templates.get(\"item_fields\") and not self._updated(\n \"/itemFields\", self.templates[\"item_fields\"], \"item_fields\"\n ):\n return self.templates[\"item_fields\"][\"tmplt\"]\n query_string = \"/itemFields\"\n retrieved = self._retrieve_data(query_string)\n return self._cache(retrieved, \"item_fields\")", - "docstring": "Get all available item fields" - }, - { - "code": "def document_func_view(serializer_class=None,\n response_serializer_class=None,\n filter_backends=None,\n permission_classes=None,\n authentication_classes=None,\n doc_format_args=list(),\n doc_format_kwargs=dict()):\n def decorator(func):\n if serializer_class:\n func.cls.serializer_class = func.view_class.serializer_class = serializer_class\n if response_serializer_class:\n func.cls.response_serializer_class = func.view_class.response_serializer_class = response_serializer_class\n if filter_backends:\n func.cls.filter_backends = func.view_class.filter_backends = filter_backends\n if permission_classes:\n func.cls.permission_classes = func.view_class.permission_classes = permission_classes\n if authentication_classes:\n func.cls.authentication_classes = func.view_class.authentication_classes = authentication_classes\n if doc_format_args or doc_format_kwargs:\n func.cls.__doc__ = func.view_class.__doc__ = getdoc(func).format(*doc_format_args, **doc_format_kwargs)\n return func\n return decorator", - "docstring": "Decorator to make functional view documentable via drf-autodocs" - }, - { - "code": "def finalize(self):\n if self.__head_less:\n warn(f'{self.__class__.__name__} configured to head less mode. 
finalize unusable')\n elif not self.__head_generate:\n warn(f'{self.__class__.__name__} already finalized or fitted')\n elif not self.__head_dict:\n raise NotFittedError(f'{self.__class__.__name__} instance is not fitted yet')\n else:\n if self.remove_rare_ratio:\n self.__clean_head(*self.__head_rare)\n self.__prepare_header()\n self.__head_rare = None\n self.__head_generate = False", - "docstring": "finalize partial fitting procedure" - }, - { - "code": "def show(self):\n copied = self.copy()\n enumerated = [el for el in enumerate(copied)]\n for (group_ind, specs) in enumerated:\n if len(enumerated) > 1: print(\"Group %d\" % group_ind)\n ordering = self.constant_keys + self.varying_keys\n spec_lines = [', '.join(['%s=%s' % (k, s[k]) for k in ordering]) for s in specs]\n print('\\n'.join(['%d: %s' % (i,l) for (i,l) in enumerate(spec_lines)]))\n print('Remaining arguments not available for %s' % self.__class__.__name__)", - "docstring": "When dynamic, not all argument values may be available." - }, - { - "code": "def locked_delete(self):\n filters = {self.key_name: self.key_value}\n self.session.query(self.model_class).filter_by(**filters).delete()", - "docstring": "Delete credentials from the SQLAlchemy datastore." - }, - { - "code": "def update_sg(self, context, sg, rule_id, action):\n db_sg = db_api.security_group_find(context, id=sg, scope=db_api.ONE)\n if not db_sg:\n return None\n with context.session.begin():\n job_body = dict(action=\"%s sg rule %s\" % (action, rule_id),\n resource_id=rule_id,\n tenant_id=db_sg['tenant_id'])\n job_body = dict(job=job_body)\n job = job_api.create_job(context.elevated(), job_body)\n rpc_client = QuarkSGAsyncProducerClient()\n try:\n rpc_client.populate_subtasks(context, sg, job['id'])\n except om_exc.MessagingTimeout:\n LOG.error(\"Failed to create subtasks. Rabbit running?\")\n return None\n return {\"job_id\": job['id']}", - "docstring": "Begins the async update process." 
- }, - { - "code": "def put(self, key):\n self._consul_request('PUT', self._key_url(key['name']), json=key)\n return key['name']", - "docstring": "Put and return the only unique identifier possible, its url" - }, - { - "code": "def scale_context_and_center(self, cr):\n bot_width, bot_height = self.bot_size\n if self.width != bot_width or self.height != bot_height:\n if self.width < self.height:\n scale_x = float(self.width) / float(bot_width)\n scale_y = scale_x\n cr.translate(0, (self.height - (bot_height * scale_y)) / 2.0)\n elif self.width > self.height:\n scale_y = float(self.height) / float(bot_height)\n scale_x = scale_y\n cr.translate((self.width - (bot_width * scale_x)) / 2.0, 0)\n else:\n scale_x = 1.0\n scale_y = 1.0\n cr.scale(scale_x, scale_y)\n self.input_device.scale_x = scale_y\n self.input_device.scale_y = scale_y", - "docstring": "Scale context based on difference between bot size and widget" - }, - { - "code": "def _getClassifierRegion(self):\n if (self._netInfo.net is not None and\n \"Classifier\" in self._netInfo.net.regions):\n return self._netInfo.net.regions[\"Classifier\"]\n else:\n return None", - "docstring": "Returns reference to the network's Classifier region" - }, - { - "code": "def t_heredoc(self, t):\n r'<<\\S+\\r?\\n'\n t.lexer.is_tabbed = False\n self._init_heredoc(t)\n t.lexer.begin('heredoc')", - "docstring": "r'<<\\S+\\r?\\n" - }, - { - "code": "def raises(self, ex):\n if not callable(self.val):\n raise TypeError('val must be callable')\n if not issubclass(ex, BaseException):\n raise TypeError('given arg must be exception')\n return AssertionBuilder(self.val, self.description, self.kind, ex)", - "docstring": "Asserts that val is callable and that when called raises the given error." - }, - { - "code": "def _density(self):\n return 2.0*len(self.edges) / (len(self.nodes) * (len(self.nodes)-1))", - "docstring": "The number of edges in relation to the total number of possible edges." - }, - { - "code": "def user_and_project_from_git(self, options, arg0=None, arg1=None):\n user, project = self.user_project_from_option(options, arg0, arg1)\n if user and project:\n return user, project\n try:\n remote = subprocess.check_output(\n [\n 'git', 'config', '--get',\n 'remote.{0}.url'.format(options.git_remote)\n ]\n )\n except subprocess.CalledProcessError:\n return None, None\n except WindowsError:\n print(\"git binary not found.\")\n exit(1)\n else:\n return self.user_project_from_remote(remote)", - "docstring": "Detects user and project from git." - }, - { - "code": "def _get_env(self, env_var):\n value = os.environ.get(env_var)\n if not value:\n raise ValueError('Missing environment variable:%s' % env_var)\n return value", - "docstring": "Helper to read an environment variable" - }, - { - "code": "def TK_askPassword(title=\"input\",msg=\"type here:\"):\n root = tkinter.Tk()\n root.withdraw()\n root.attributes(\"-topmost\", True)\n root.lift()\n value=tkinter.simpledialog.askstring(title,msg)\n root.destroy()\n return value", - "docstring": "use the GUI to ask for a string." 
- }, - { - "code": "def write_pgpass(self, name=None, site=None, use_sudo=0, root=0):\n r = self.database_renderer(name=name, site=site)\n root = int(root)\n use_sudo = int(use_sudo)\n r.run('touch {pgpass_path}')\n if '~' in r.env.pgpass_path:\n r.run('chmod {pgpass_chmod} {pgpass_path}')\n else:\n r.sudo('chmod {pgpass_chmod} {pgpass_path}')\n if root:\n r.env.shell_username = r.env.get('db_root_username', 'postgres')\n r.env.shell_password = r.env.get('db_root_password', 'password')\n else:\n r.env.shell_username = r.env.db_user\n r.env.shell_password = r.env.db_password\n r.append(\n '{db_host}:{port}:*:{shell_username}:{shell_password}',\n r.env.pgpass_path,\n use_sudo=use_sudo)", - "docstring": "Write the file used to store login credentials for PostgreSQL." - }, - { - "code": "def create_environment(self, env_name, version_label=None,\n solution_stack_name=None, cname_prefix=None, description=None,\n option_settings=None, tier_name='WebServer', tier_type='Standard', tier_version='1.1'):\n out(\"Creating environment: \" + str(env_name) + \", tier_name:\" + str(tier_name) + \", tier_type:\" + str(tier_type))\n self.ebs.create_environment(self.app_name, env_name,\n version_label=version_label,\n solution_stack_name=solution_stack_name,\n cname_prefix=cname_prefix,\n description=description,\n option_settings=option_settings,\n tier_type=tier_type,\n tier_name=tier_name,\n tier_version=tier_version)", - "docstring": "Creates a new environment" - }, - { - "code": "def color_scale(color, level):\n return tuple([int(i * level) >> 8 for i in list(color)])", - "docstring": "Scale RGB tuple by level, 0 - 256" - }, - { - "code": "def extract_diff_sla_from_config_file(obj, options_file):\n rule_strings = {}\n config_obj = ConfigParser.ConfigParser()\n config_obj.optionxform = str\n config_obj.read(options_file)\n for section in config_obj.sections():\n rule_strings, kwargs = get_rule_strings(config_obj, section)\n for (key, val) in rule_strings.iteritems():\n set_sla(obj, section, key, val)", - "docstring": "Helper function to parse diff config file, which contains SLA rules for diff comparisons" - }, - { - "code": "def count_sequences(infile):\n seq_reader = sequences.file_reader(infile)\n n = 0\n for seq in seq_reader:\n n += 1\n return n", - "docstring": "Returns the number of sequences in a file" - }, - { - "code": "def start(self):\n self.receiver = self.Receiver(\n self.read,\n self.write,\n self.send_lock,\n self.senders,\n self.frames_received,\n callback=self.receive_callback,\n fcs_nack=self.fcs_nack,\n )\n self.receiver.start()", - "docstring": "Starts HDLC controller's threads." - }, - { - "code": "def parent(groups,ID):\n if ID in groups.keys():\n return ID\n if not ID in groups.keys():\n for actualParent in groups.keys():\n if ID in groups[actualParent]:\n return actualParent\n return None", - "docstring": "given a groups dictionary and an ID, return its actual parent ID." - }, - { - "code": "def combine(cls, date, time):\n \"Construct a datetime from a given date and a given time.\"\n if not isinstance(date, _date_class):\n raise TypeError(\"date argument must be a date instance\")\n if not isinstance(time, _time_class):\n raise TypeError(\"time argument must be a time instance\")\n return cls(date.year, date.month, date.day,\n time.hour, time.minute, time.second, time.microsecond,\n time.tzinfo)", - "docstring": "Construct a datetime from a given date and a given time." 
- }, - { - "code": "def decorate_matches(match_in, match_word):\n matches = re.finditer(match_word, match_in, re.IGNORECASE)\n for matched_string in set([match.group() for match in matches]):\n match_in = match_in.replace(\n matched_string,\n getattr(settings, \"SEARCH_MATCH_DECORATION\", u\"{}\").format(matched_string)\n )\n return match_in", - "docstring": "decorate the matches within the excerpt" - }, - { - "code": "def instruction(self):\n try:\n _decoding_cache = getattr(self, '_decoding_cache')\n except Exception:\n _decoding_cache = self._decoding_cache = {}\n pc = self.pc\n if isinstance(pc, Constant):\n pc = pc.value\n if pc in _decoding_cache:\n return _decoding_cache[pc]\n def getcode():\n bytecode = self.bytecode\n for pc_i in range(pc, len(bytecode)):\n yield simplify(bytecode[pc_i]).value\n while True:\n yield 0\n instruction = EVMAsm.disassemble_one(getcode(), pc=pc, fork=DEFAULT_FORK)\n _decoding_cache[pc] = instruction\n return instruction", - "docstring": "Current instruction pointed by self.pc" - }, - { - "code": "def conf_path(self):\n from burlap.system import distrib_id, distrib_release\n hostname = self.current_hostname\n if hostname not in self._conf_cache:\n self.env.conf_specifics[hostname] = self.env.conf_default\n d_id = distrib_id()\n d_release = distrib_release()\n for key in ((d_id, d_release), (d_id,)):\n if key in self.env.conf_specifics:\n self._conf_cache[hostname] = self.env.conf_specifics[key]\n return self._conf_cache[hostname]", - "docstring": "Retrieves the path to the MySQL configuration file." - }, - { - "code": "def jocker(test_options=None):\n version = ver_check()\n options = test_options or docopt(__doc__, version=version)\n _set_global_verbosity_level(options.get('--verbose'))\n jocker_lgr.debug(options)\n jocker_run(options)", - "docstring": "Main entry point for script." - }, - { - "code": "def metadata(proto_dataset_uri, relpath_in_dataset, key, value):\n proto_dataset = dtoolcore.ProtoDataSet.from_uri(\n uri=proto_dataset_uri,\n config_path=CONFIG_PATH)\n proto_dataset.add_item_metadata(\n handle=relpath_in_dataset,\n key=key,\n value=value)", - "docstring": "Add metadata to a file in the proto dataset." - }, - { - "code": "def figure_protocol(self):\n self.log.debug(\"creating overlayed protocols plot\")\n self.figure()\n plt.plot(self.abf.protoX,self.abf.protoY,color='r')\n self.marginX=0\n self.decorate(protocol=True)", - "docstring": "plot the current sweep protocol." - }, - { - "code": "def do_action(self, action):\n temp_state = np.rot90(self._state, action)\n reward = self._do_action_left(temp_state)\n self._state = np.rot90(temp_state, -action)\n self._score += reward\n self.add_random_tile()\n return reward", - "docstring": "Execute action, add a new tile, update the score & return the reward." - }, - { - "code": "def peripheral_didUpdateValueForDescriptor_error_(self, peripheral, descriptor, error):\n logger.debug('peripheral_didUpdateValueForDescriptor_error called')\n if error is not None:\n return\n device = device_list().get(peripheral)\n if device is not None:\n device._descriptor_changed(descriptor)", - "docstring": "Called when descriptor value was read or updated." 
- }, - { - "code": "def _handle_state_change_msg(self, new_helper):\n assert self.my_pplan_helper is not None\n assert self.my_instance is not None and self.my_instance.py_class is not None\n if self.my_pplan_helper.get_topology_state() != new_helper.get_topology_state():\n self.my_pplan_helper = new_helper\n if new_helper.is_topology_running():\n if not self.is_instance_started:\n self.start_instance_if_possible()\n self.my_instance.py_class.invoke_activate()\n elif new_helper.is_topology_paused():\n self.my_instance.py_class.invoke_deactivate()\n else:\n raise RuntimeError(\"Unexpected TopologyState update: %s\" % new_helper.get_topology_state())\n else:\n Log.info(\"Topology state remains the same.\")", - "docstring": "Called when state change is commanded by stream manager" - }, - { - "code": "def generate_oauth2_headers(self):\n encoded_credentials = base64.b64encode(('{0}:{1}'.format(self.consumer_key,self.consumer_secret)).encode('utf-8'))\n headers={\n 'Authorization':'Basic {0}'.format(encoded_credentials.decode('utf-8')),\n 'Content-Type': 'application/x-www-form-urlencoded'\n }\n return headers", - "docstring": "Generates header for oauth2" - }, - { - "code": "def filter_config(config, deploy_config):\n if not os.path.isfile(deploy_config):\n return DotDict()\n config_module = get_config_module(deploy_config)\n return config_module.filter(config)", - "docstring": "Return a config subset using the filter defined in the deploy config." - }, - { - "code": "def _assert(self, expression: Bool):\n assert isinstance(expression, Bool)\n smtlib = translate_to_smtlib(expression)\n self._send('(assert %s)' % smtlib)", - "docstring": "Auxiliary method to send an assert" - }, - { - "code": "def save(self):\n token = models.PasswordResetToken.objects.get(\n key=self.validated_data[\"key\"]\n )\n token.email.user.set_password(self.validated_data[\"password\"])\n token.email.user.save()\n logger.info(\"Reset password for %s\", token.email.user)\n token.delete()", - "docstring": "Reset the user's password if the provided information is valid." - }, - { - "code": "def update(self, server):\n return server.put(\n 'challenge_admin',\n self.as_payload(),\n replacements={'slug': self.slug})", - "docstring": "Update existing challenge on the server" - }, - { - "code": "def _map(self, event):\n description = event.get('description', '')\n start_time = google_base.parse_rfc3339_utc_string(\n event.get('timestamp', ''))\n for name, regex in _EVENT_REGEX_MAP.items():\n match = regex.match(description)\n if match:\n return {'name': name, 'start-time': start_time}, match\n return {'name': description, 'start-time': start_time}, None", - "docstring": "Extract elements from an operation event and map to a named event." 
- }, - { - "code": "def build_extra_args_dict(cl_args):\n component_parallelism = cl_args['component_parallelism']\n runtime_configs = cl_args['runtime_config']\n container_number = cl_args['container_number']\n if (component_parallelism and runtime_configs) or (container_number and runtime_configs):\n raise Exception(\n \"(component-parallelism or container_num) and runtime-config \" +\n \"can't be updated at the same time\")\n dict_extra_args = {}\n nothing_set = True\n if component_parallelism:\n dict_extra_args.update({'component_parallelism': component_parallelism})\n nothing_set = False\n if container_number:\n dict_extra_args.update({'container_number': container_number})\n nothing_set = False\n if runtime_configs:\n dict_extra_args.update({'runtime_config': runtime_configs})\n nothing_set = False\n if nothing_set:\n raise Exception(\n \"Missing arguments --component-parallelism or --runtime-config or --container-number\")\n if cl_args['dry_run']:\n dict_extra_args.update({'dry_run': True})\n if 'dry_run_format' in cl_args:\n dict_extra_args.update({'dry_run_format': cl_args[\"dry_run_format\"]})\n return dict_extra_args", - "docstring": "Build extra args map" - }, - { - "code": "def _insert_to_array(self, chunk, results):\n chunksize = self._chunksize\n qrts, invs = results\n with h5py.File(self.database.output, 'r+') as io5:\n io5['quartets'][chunk:chunk+chunksize] = qrts\n if self.params.save_invariants:\n if self.checkpoint.boots:\n key = \"invariants/boot{}\".format(self.checkpoint.boots)\n io5[key][chunk:chunk+chunksize] = invs\n else:\n io5[\"invariants/boot0\"][chunk:chunk+chunksize] = invs", - "docstring": "Enters results arrays into the HDF5 database." - }, - { - "code": "def _tz(self, z):\n return (z-self.param_dict['psf-zslab'])*self.param_dict[self.zscale]", - "docstring": "Transform z to real-space coordinates from tile coordinates" - }, - { - "code": "def find_task_descriptor(self, task_id):\n for task_descriptor in self.task_descriptors:\n if task_descriptor.task_metadata.get('task-id') == task_id:\n return task_descriptor\n return None", - "docstring": "Returns the task_descriptor corresponding to task_id." - }, - { - "code": "def id_to_word(self, word_id):\n if word_id >= len(self.reverse_vocab):\n return self.reverse_vocab[self.unk_id]\n else:\n return self.reverse_vocab[word_id]", - "docstring": "Returns the word string of an integer word id." - }, - { - "code": "def present_results(self, query_text, n=10):\n \"Get results for the query and present them.\"\n self.present(self.query(query_text, n))", - "docstring": "Get results for the query and present them." - }, - { - "code": "def err(format_msg, *args, **kwargs):\n exc_info = kwargs.pop(\"exc_info\", False)\n stderr.warning(str(format_msg).format(*args, **kwargs), exc_info=exc_info)", - "docstring": "print format_msg to stderr" - }, - { - "code": "def validate_string_list(value):\n try:\n if sys.version_info.major < 3:\n from locale import getpreferredencoding\n encoding = getpreferredencoding()\n value = value.decode(encoding)\n return [x.strip() for x in value.split(u\",\")]\n except (AttributeError, TypeError, UnicodeError):\n raise ValueError(\"Bad string list\")", - "docstring": "Validator for string lists to be used with `add_setting`." 
- }, - { - "code": "def post_publication_processing(event, cursor):\n module_ident, ident_hash = event.module_ident, event.ident_hash\n celery_app = get_current_registry().celery_app\n cursor.execute('SELECT result_id::text '\n 'FROM document_baking_result_associations '\n 'WHERE module_ident = %s', (module_ident,))\n for result in cursor.fetchall():\n state = celery_app.AsyncResult(result[0]).state\n if state in ('QUEUED', 'STARTED', 'RETRY'):\n logger.debug('Already queued module_ident={} ident_hash={}'.format(\n module_ident, ident_hash))\n return\n logger.debug('Queued for processing module_ident={} ident_hash={}'.format(\n module_ident, ident_hash))\n recipe_ids = _get_recipe_ids(module_ident, cursor)\n update_module_state(cursor, module_ident, 'processing', recipe_ids[0])\n cursor.connection.commit()\n task_name = 'cnxpublishing.subscribers.baking_processor'\n baking_processor = celery_app.tasks[task_name]\n result = baking_processor.delay(module_ident, ident_hash)\n baking_processor.backend.store_result(result.id, None, 'QUEUED')\n track_baking_proc_state(result, module_ident, cursor)", - "docstring": "Process post-publication events coming out of the database." - }, - { - "code": "def run(self, cmd, *args):\n if self.manager is None:\n raise Exception(\"Fatal internal error: Missing repository manager\")\n if cmd not in dir(self.manager):\n raise Exception(\"Fatal internal error: Invalid command {} being run\".format(cmd))\n func = getattr(self.manager, cmd)\n repo = self\n return func(repo, *args)", - "docstring": "Run a specific command using the manager" - }, - { - "code": "def _prepare_io_handler_cb(self, handler):\n self._anything_done = True\n logger.debug(\"_prepar_io_handler_cb called for {0!r}\".format(handler))\n self._configure_io_handler(handler)\n self._prepare_sources.pop(handler, None)\n return False", - "docstring": "Timeout callback called to try prepare an IOHandler again." - }, - { - "code": "def isdir(s):\n try:\n st = os.stat(s)\n except os.error:\n return False\n return stat.S_ISDIR(st.st_mode)", - "docstring": "Return true if the pathname refers to an existing directory." - }, - { - "code": "def _get_app_libs_volume_mounts(app_name, assembled_specs):\n volumes = []\n for lib_name in assembled_specs['apps'][app_name]['depends']['libs']:\n lib_spec = assembled_specs['libs'][lib_name]\n volumes.append(\"{}:{}\".format(Repo(lib_spec['repo']).vm_path, container_code_path(lib_spec)))\n return volumes", - "docstring": "Returns a list of the formatted volume mounts for all libs that an app uses" - }, - { - "code": "def field_type(self):\n if not self.model:\n return 'JSON'\n database = self.model._meta.database\n if isinstance(database, Proxy):\n database = database.obj\n if Json and isinstance(database, PostgresqlDatabase):\n return 'JSON'\n return 'TEXT'", - "docstring": "Return database field type." 
- }, - { - "code": "def create_feature_array(text, n_pad=21):\n n = len(text)\n n_pad_2 = int((n_pad - 1)/2)\n text_pad = [' '] * n_pad_2 + [t for t in text] + [' '] * n_pad_2\n x_char, x_type = [], []\n for i in range(n_pad_2, n_pad_2 + n):\n char_list = text_pad[i + 1: i + n_pad_2 + 1] + \\\n list(reversed(text_pad[i - n_pad_2: i])) + \\\n [text_pad[i]]\n char_map = [CHARS_MAP.get(c, 80) for c in char_list]\n char_type = [CHAR_TYPES_MAP.get(CHAR_TYPE_FLATTEN.get(c, 'o'), 4)\n for c in char_list]\n x_char.append(char_map)\n x_type.append(char_type)\n x_char = np.array(x_char).astype(float)\n x_type = np.array(x_type).astype(float)\n return x_char, x_type", - "docstring": "Create feature array of character and surrounding characters" - }, - { - "code": "def _dump_registry(cls, file=None):\n print >> file, \"Class: %s.%s\" % (cls.__module__, cls.__name__)\n print >> file, \"Inv.counter: %s\" % ABCMeta._abc_invalidation_counter\n for name in sorted(cls.__dict__.keys()):\n if name.startswith(\"_abc_\"):\n value = getattr(cls, name)\n print >> file, \"%s: %r\" % (name, value)", - "docstring": "Debug helper to print the ABC registry." - }, - { - "code": "def _generate_html_diff(self, expected_fn, expected_lines, obtained_fn, obtained_lines):\n import difflib\n differ = difflib.HtmlDiff()\n return differ.make_file(\n fromlines=expected_lines,\n fromdesc=expected_fn,\n tolines=obtained_lines,\n todesc=obtained_fn,\n )", - "docstring": "Returns a nice side-by-side diff of the given files, as a string." - }, - { - "code": "def _tile(self, n):\n zsc = np.array([1.0/self.zscale, 1, 1])\n pos, rad = self.pos[n], self.rad[n]\n pos = self._trans(pos)\n return Tile(pos - zsc*rad, pos + zsc*rad).pad(self.support_pad)", - "docstring": "Get the tile surrounding particle `n`" - }, - { - "code": "def of_structs(cls, a, b):\n t_diff = ThriftDiff(a, b)\n t_diff._do_diff()\n return t_diff", - "docstring": "Diff two thrift structs and return the result as a ThriftDiff instance" - }, - { - "code": "def queries_map():\n qs = _all_metric_queries()\n return dict(zip(qs[0], qs[1]) + zip(qs[2], qs[3]))", - "docstring": "map from query parameter to query name" - }, - { - "code": "def url_is_valid(self, url):\n if url.startswith(\"file://\"):\n url = url.replace(\"file://\",\"\")\n return os.path.exists(url)", - "docstring": "Check if a URL exists" - }, - { - "code": "def validate_lms_user_id(self, value):\n enterprise_customer = self.context.get('enterprise_customer')\n try:\n return models.EnterpriseCustomerUser.objects.get(\n user_id=value,\n enterprise_customer=enterprise_customer\n )\n except models.EnterpriseCustomerUser.DoesNotExist:\n pass\n return None", - "docstring": "Validates the lms_user_id, if is given, to see if there is an existing EnterpriseCustomerUser for it." 
- }, - { - "code": "def env():\n if cij.ssh.env():\n cij.err(\"board.env: invalid SSH environment\")\n return 1\n board = cij.env_to_dict(PREFIX, REQUIRED)\n if board is None:\n cij.err(\"board.env: invalid BOARD environment\")\n return 1\n board[\"CLASS\"] = \"_\".join([board[r] for r in REQUIRED[:-1]])\n board[\"IDENT\"] = \"-\".join([board[\"CLASS\"], board[\"ALIAS\"]])\n cij.env_export(PREFIX, EXPORTED, board)\n return 0", - "docstring": "Verify BOARD variables and construct exported variables" - }, - { - "code": "def get(self, key, default=None):\n if self.in_memory:\n return self._memory_db.get(key, default)\n else:\n db = self._read_file()\n return db.get(key, default)", - "docstring": "Get key value, return default if key doesn't exist" - }, - { - "code": "def below(self, ref):\n if not self._valid_ordering_reference(ref):\n raise ValueError(\n \"%r can only be moved below instances of %r which %s equals %r.\" % (\n self, self.__class__, self.order_with_respect_to,\n self._get_order_with_respect_to()\n )\n )\n if self.order == ref.order:\n return\n if self.order > ref.order:\n o = self.get_ordering_queryset().filter(order__gt=ref.order).aggregate(Min('order')).get('order__min') or 0\n else:\n o = ref.order\n self.to(o)", - "docstring": "Move this object below the referenced object." - }, - { - "code": "def group_theta(self, group):\n for i, g in enumerate(self.nodes.keys()):\n if g == group:\n break\n return i * self.major_angle", - "docstring": "Computes the theta along which a group's nodes are aligned." - }, - { - "code": "def __got_ack(self):\n log.debug('waiting for ack')\n res = self._port.read(1)\n log.debug('ack read %s', hexify(res))\n return res == ACK", - "docstring": "Returns true if ACK is received" - }, - { - "code": "def __clear_buffers(self):\n try:\n self._port.reset_input_buffer()\n self._port.reset_output_buffer()\n except AttributeError:\n self._port.flushInput()\n self._port.flushOutput()", - "docstring": "Clears the input and output buffers" - }, - { - "code": "def _get_range(book, range_, sheet):\n filename = None\n if isinstance(book, str):\n filename = book\n book = opxl.load_workbook(book, data_only=True)\n elif isinstance(book, opxl.Workbook):\n pass\n else:\n raise TypeError\n if _is_range_address(range_):\n sheet_names = [name.upper() for name in book.sheetnames]\n index = sheet_names.index(sheet.upper())\n data = book.worksheets[index][range_]\n else:\n data = _get_namedrange(book, range_, sheet)\n if data is None:\n raise ValueError(\n \"Named range '%s' not found in %s\" % (range_, filename or book)\n )\n return data", - "docstring": "Return a range as nested dict of openpyxl cells." - }, - { - "code": "def _fire(self, layers, things, the_plot):\n if the_plot.get('last_player_shot') == the_plot.frame: return\n the_plot['last_player_shot'] = the_plot.frame\n row, col = things['P'].position\n self._teleport((row-1, col))", - "docstring": "Launches a new bolt from the player." - }, - { - "code": "def _parse_allele_data(self):\n return [Allele(sequence=x) for x in\n [self.ref_allele] + self.alt_alleles]", - "docstring": "Create list of Alleles from VCF line data" - }, - { - "code": "def remove(self, guild_id):\r\n if guild_id in self._players:\r\n self._players[guild_id].cleanup()\r\n del self._players[guild_id]", - "docstring": "Removes a player from the current players." 
- }, - { - "code": "def dropbox_submission(dropbox, request):\n try:\n data = dropbox_schema.deserialize(request.POST)\n except Exception:\n return HTTPFound(location=request.route_url('dropbox_form'))\n dropbox.message = data.get('message')\n if 'testing_secret' in dropbox.settings:\n dropbox.from_watchdog = is_equal(\n unicode(dropbox.settings['test_submission_secret']),\n data.pop('testing_secret', u''))\n if data.get('upload') is not None:\n dropbox.add_attachment(data['upload'])\n dropbox.submit()\n drop_url = request.route_url('dropbox_view', drop_id=dropbox.drop_id)\n print(\"Created dropbox %s\" % drop_url)\n return HTTPFound(location=drop_url)", - "docstring": "handles the form submission, redirects to the dropbox's status page." - }, - { - "code": "def chunks(dictionary, chunk_size):\n iterable = iter(dictionary)\n for __ in range(0, len(dictionary), chunk_size):\n yield {key: dictionary[key] for key in islice(iterable, chunk_size)}", - "docstring": "Yield successive n-sized chunks from dictionary." - }, - { - "code": "def plotAllSweeps(abfFile):\n r = io.AxonIO(filename=abfFile)\n bl = r.read_block(lazy=False, cascade=True) \n print(abfFile+\"\\nplotting %d sweeps...\"%len(bl.segments))\n plt.figure(figsize=(12,10))\n plt.title(abfFile)\n for sweep in range(len(bl.segments)):\n trace = bl.segments[sweep].analogsignals[0]\n plt.plot(trace.times-trace.times[0],trace.magnitude,alpha=.5) \n plt.ylabel(trace.dimensionality)\n plt.xlabel(\"seconds\")\n plt.show()\n plt.close()", - "docstring": "simple example how to load an ABF file and plot every sweep." - }, - { - "code": "def reverse(self):\n if self._original_target_content:\n with open(self.target, 'w') as fp:\n fp.write(self._original_target_content)", - "docstring": "Restore content in target file to be before any changes" - }, - { - "code": "def stop_scan(self, timeout_sec=TIMEOUT_SEC):\n self._scan_stopped.clear()\n self._adapter.StopDiscovery()\n if not self._scan_stopped.wait(timeout_sec):\n raise RuntimeError('Exceeded timeout waiting for adapter to stop scanning!')", - "docstring": "Stop scanning for BLE devices with this adapter." - }, - { - "code": "def build(self, bldr):\n stage_names = sets.Set()\n for source in self._sources:\n source._build(bldr, stage_names)\n for source in self._sources:\n if not source._all_built():\n raise RuntimeError(\"Topology cannot be fully built! Are all sources added?\")", - "docstring": "Builds the topology and returns the builder" - }, - { - "code": "def trim_Ns(self):\n i = 0\n while i < len(self) and self.seq[i] in 'nN':\n i += 1\n self.seq = self.seq[i:]\n self.qual = self.qual[i:]\n self.seq = self.seq.rstrip('Nn')\n self.qual = self.qual[:len(self.seq)]", - "docstring": "Removes any leading or trailing N or n characters from the sequence" - }, - { - "code": "def _get_base_component(self):\n comp = topology_pb2.Component()\n comp.name = self.name\n comp.spec = topology_pb2.ComponentObjectSpec.Value(\"PYTHON_CLASS_NAME\")\n comp.class_name = self.python_class_path\n comp.config.CopyFrom(self._get_comp_config())\n return comp", - "docstring": "Returns Component protobuf message" - }, - { - "code": "def create(self, name, value):\n if value is None:\n raise ValueError('Setting value cannot be `None`.')\n model = Setting.get_model_for_value(value)\n obj = super(SettingQuerySet, model.objects.all()) \\\n .create(name=name, value=value)\n return obj", - "docstring": "Creates and returns an object of the appropriate type for ``value``." 
- }, - { - "code": "def drop_bel_namespace(self) -> Optional[Namespace]:\n namespace = self._get_default_namespace()\n if namespace is not None:\n for entry in tqdm(namespace.entries, desc=f'deleting entries in {self._get_namespace_name()}'):\n self.session.delete(entry)\n self.session.delete(namespace)\n log.info('committing deletions')\n self.session.commit()\n return namespace", - "docstring": "Remove the default namespace if it exists." - }, - { - "code": "def _flann_args(self, X=None):\n \"The dictionary of arguments to give to FLANN.\"\n args = {'cores': self._n_jobs}\n if self.flann_algorithm == 'auto':\n if X is None or X.dim > 5:\n args['algorithm'] = 'linear'\n else:\n args['algorithm'] = 'kdtree_single'\n else:\n args['algorithm'] = self.flann_algorithm\n if self.flann_args:\n args.update(self.flann_args)\n try:\n FLANNParameters().update(args)\n except AttributeError as e:\n msg = \"flann_args contains an invalid argument:\\n {}\"\n raise TypeError(msg.format(e))\n return args", - "docstring": "The dictionary of arguments to give to FLANN." - }, - { - "code": "def _convert(value, to_type, default=None):\n try:\n return default if value is None else to_type(value)\n except ValueError:\n return default", - "docstring": "Convert value to to_type, returns default if fails." - }, - { - "code": "def _performAction(self, action):\n try:\n _a11y.AXUIElement._performAction(self, 'AX%s' % action)\n except _a11y.ErrorUnsupported as e:\n sierra_ver = '10.12'\n if mac_ver()[0] < sierra_ver:\n raise e\n else:\n pass", - "docstring": "Perform the specified action." - }, - { - "code": "def auto_add(repo, autooptions, files):\n mapping = { \".\": \"\" }\n if (('import' in autooptions) and\n ('directory-mapping' in autooptions['import'])):\n mapping = autooptions['import']['directory-mapping']\n keys = mapping.keys()\n keys = sorted(keys, key=lambda k: len(k), reverse=True)\n count = 0\n params = []\n for f in files:\n relativepath = f\n for k in keys:\n v = mapping[k]\n if f.startswith(k + \"/\"):\n relativepath = f.replace(k + \"/\", v)\n break\n count += files_add(repo=repo,\n args=[f],\n targetdir=os.path.dirname(relativepath))\n return count", - "docstring": "Cleanup the paths and add" - }, - { - "code": "def format_objects(objects, children=False, columns=None, header=True):\n columns = columns or ('NAME', 'TYPE', 'PATH')\n objects = sorted(objects, key=_type_and_name)\n data = []\n for obj in objects:\n if isinstance(obj, cpenv.VirtualEnvironment):\n data.append(get_info(obj))\n modules = obj.get_modules()\n if children and modules:\n for mod in modules:\n data.append(get_info(mod, indent=2, root=obj.path))\n else:\n data.append(get_info(obj))\n maxes = [len(max(col, key=len)) for col in zip(*data)]\n tmpl = '{:%d} {:%d} {:%d}' % tuple(maxes)\n lines = []\n if header:\n lines.append('\\n' + bold_blue(tmpl.format(*columns)))\n for obj_data in data:\n lines.append(tmpl.format(*obj_data))\n return '\\n'.join(lines)", - "docstring": "Format a list of environments and modules for terminal output" - }, - { - "code": "def awake(self, procid):\n logger.debug(f\"Remove procid:{procid} from waitlists and reestablish it in the running list\")\n for wait_list in self.rwait:\n if procid in wait_list:\n wait_list.remove(procid)\n for wait_list in self.twait:\n if procid in wait_list:\n wait_list.remove(procid)\n self.timers[procid] = None\n self.running.append(procid)\n if self._current is None:\n self._current = procid", - "docstring": "Remove procid from waitlists and reestablish it in the running 
list" - }, - { - "code": "def read(self, size=None):\n while size is None or len(self.buffer) < size:\n try:\n self.buffer += next(self.data_stream)\n except StopIteration:\n break\n sized_chunk = self.buffer[:size]\n if size is None:\n self.buffer = \"\"\n else:\n self.buffer = self.buffer[size:]\n return sized_chunk", - "docstring": "Read bytes from an iterator." - }, - { - "code": "def _getW(self):\n w = self._w\n if type(w) is list:\n return w[self._random.getUInt32(len(w))]\n else:\n return w", - "docstring": "Gets a value of `w` for use in generating a pattern." - }, - { - "code": "def input_from_cons(constupl, datas):\n ' solve bytes in |datas| based on '\n def make_chr(c):\n try:\n return chr(c)\n except Exception:\n return c\n newset = constraints_to_constraintset(constupl)\n ret = ''\n for data in datas:\n for c in data:\n ret += make_chr(solver.get_value(newset, c))\n return ret", - "docstring": "solve bytes in |datas| based on" - }, - { - "code": "def collect_entities (sent, ranks, stopwords, spacy_nlp):\n global DEBUG\n sent_text = \" \".join([w.raw for w in sent])\n if DEBUG:\n print(\"sent:\", sent_text)\n for ent in spacy_nlp(sent_text).ents:\n if DEBUG:\n print(\"NER:\", ent.label_, ent.text)\n if (ent.label_ not in [\"CARDINAL\"]) and (ent.text.lower() not in stopwords):\n w_ranks, w_ids = find_entity(sent, ranks, ent.text.split(\" \"), 0)\n if w_ranks and w_ids:\n rl = RankedLexeme(text=ent.text.lower(), rank=w_ranks, ids=w_ids, pos=\"np\", count=1)\n if DEBUG:\n print(rl)\n yield rl", - "docstring": "iterator for collecting the named-entities" - }, - { - "code": "def read_environment():\n out = {}\n for k,v in iteritems(os.environ):\n if transform(k) in default_conf:\n out[transform(k)] = v\n return out", - "docstring": "Read all environment variables to see if they contain PERI" - }, - { - "code": "def _ordinal_metric(_v1, _v2, i1, i2, n_v):\n if i1 > i2:\n i1, i2 = i2, i1\n return (np.sum(n_v[i1:(i2 + 1)]) - (n_v[i1] + n_v[i2]) / 2) ** 2", - "docstring": "Metric for ordinal data." - }, - { - "code": "def build_machine_type(cls, min_cores, min_ram):\n min_cores = min_cores or job_model.DEFAULT_MIN_CORES\n min_ram = min_ram or job_model.DEFAULT_MIN_RAM\n min_ram *= GoogleV2CustomMachine._MB_PER_GB\n cores = cls._validate_cores(min_cores)\n ram = cls._validate_ram(min_ram)\n memory_to_cpu_ratio = ram / cores\n if memory_to_cpu_ratio < GoogleV2CustomMachine._MIN_MEMORY_PER_CPU:\n adjusted_ram = GoogleV2CustomMachine._MIN_MEMORY_PER_CPU * cores\n ram = cls._validate_ram(adjusted_ram)\n elif memory_to_cpu_ratio > GoogleV2CustomMachine._MAX_MEMORY_PER_CPU:\n adjusted_cores = math.ceil(\n ram / GoogleV2CustomMachine._MAX_MEMORY_PER_CPU)\n cores = cls._validate_cores(adjusted_cores)\n else:\n pass\n return 'custom-{}-{}'.format(int(cores), int(ram))", - "docstring": "Returns a custom machine type string." - }, - { - "code": "def _translateCommands(commands):\n for command in commands.split(','):\n result = [0, 0]\n device, command = command.strip().upper().split(None, 1)\n result[0] = houseCodes[device[0]]\n if len(device) > 1:\n deviceNumber = deviceNumbers[device[1:]]\n result[0] |= deviceNumber[0]\n result[1] = deviceNumber[1]\n result[1] |= commandCodes[command]\n yield ' '.join(map(_strBinary, result))", - "docstring": "Generate the binary strings for a comma seperated list of commands." 
- }, - { - "code": "def add_event(request):\n form = AddEventForm(request.POST or None)\n if form.is_valid():\n instance = form.save(commit=False)\n instance.sites = settings.SITE_ID\n instance.submitted_by = request.user\n instance.approved = True\n instance.slug = slugify(instance.name)\n instance.save()\n messages.success(request, 'Your event has been added.')\n return HttpResponseRedirect(reverse('events_index'))\n return render(request, 'happenings/event_form.html', {\n 'form': form,\n 'form_title': 'Add an event'\n })", - "docstring": "Public form to add an event." - }, - { - "code": "def _initializeBucketMap(self, maxBuckets, offset):\n self._maxBuckets = maxBuckets\n self.minIndex = self._maxBuckets / 2\n self.maxIndex = self._maxBuckets / 2\n self._offset = offset\n self.bucketMap = {}\n def _permutation(n):\n r = numpy.arange(n, dtype=numpy.uint32)\n self.random.shuffle(r)\n return r\n self.bucketMap[self.minIndex] = _permutation(self.n)[0:self.w]\n self.numTries = 0", - "docstring": "Initialize the bucket map assuming the given number of maxBuckets." - }, - { - "code": "def MSTORE8(self, address, value):\n if istainted(self.pc):\n for taint in get_taints(self.pc):\n value = taint_with(value, taint)\n self._allocate(address, 1)\n self._store(address, Operators.EXTRACT(value, 0, 8), 1)", - "docstring": "Save byte to memory" - }, - { - "code": "def pick_coda_from_decimal(decimal):\n decimal = Decimal(decimal)\n __, digits, exp = decimal.as_tuple()\n if exp < 0:\n return DIGIT_CODAS[digits[-1]]\n __, digits, exp = decimal.normalize().as_tuple()\n index = bisect_right(EXP_INDICES, exp) - 1\n if index < 0:\n return DIGIT_CODAS[digits[-1]]\n else:\n return EXP_CODAS[EXP_INDICES[index]]", - "docstring": "Picks only a coda from a decimal." - }, - { - "code": "async def send(self, message: Message) -> None:\n if self.application_state == WebSocketState.CONNECTING:\n message_type = message[\"type\"]\n assert message_type in {\"websocket.accept\", \"websocket.close\"}\n if message_type == \"websocket.close\":\n self.application_state = WebSocketState.DISCONNECTED\n else:\n self.application_state = WebSocketState.CONNECTED\n await self._send(message)\n elif self.application_state == WebSocketState.CONNECTED:\n message_type = message[\"type\"]\n assert message_type in {\"websocket.send\", \"websocket.close\"}\n if message_type == \"websocket.close\":\n self.application_state = WebSocketState.DISCONNECTED\n await self._send(message)\n else:\n raise RuntimeError('Cannot call \"send\" once a close message has been sent.')", - "docstring": "Send ASGI websocket messages, ensuring valid state transitions." - }, - { - "code": "def default(self, obj):\n if isinstance(obj, np.ndarray):\n return obj.tolist()\n elif isinstance(obj, np.generic):\n return np.asscalar(obj)\n return json.JSONEncoder(self, obj)", - "docstring": "If input object is an ndarray it will be converted into a list" - }, - { - "code": "def GetChildren(self) -> list:\n children = []\n child = self.GetFirstChildControl()\n while child:\n children.append(child)\n child = child.GetNextSiblingControl()\n return children", - "docstring": "Return list, a list of `Control` subclasses." 
- }, - { - "code": "def main():\n args = argparser().parse_args(sys.argv[1:])\n password = os.environ.get('PYNETGEAR_PASSWORD') or args.password\n netgear = Netgear(password, args.host, args.user, args.port, args.ssl, args.url, args.force_login_v2)\n results = run_subcommand(netgear, args)\n formatter = make_formatter(args.format)\n if results is None:\n print(\"Error communicating with the Netgear router\")\n else:\n formatter(results)", - "docstring": "Scan for devices and print results." - }, - { - "code": "def link_zscale(st):\n psf = st.get('psf')\n psf.param_dict['zscale'] = psf.param_dict['psf-zscale']\n psf.params[psf.params.index('psf-zscale')] = 'zscale'\n psf.global_zscale = True\n psf.param_dict.pop('psf-zscale')\n st.trigger_parameter_change()\n st.reset()", - "docstring": "Links the state ``st`` psf zscale with the global zscale" - }, - { - "code": "def _get_children_by_tag_name(node, name):\n try:\n return [child for child in node.childNodes if child.nodeName == name]\n except TypeError:\n return []", - "docstring": "Retrieve all children from node 'node' with name 'name'." - }, - { - "code": "def _get_container_port_mappings(app):\n container = app['container']\n port_mappings = container.get('portMappings')\n if port_mappings is None and 'docker' in container:\n port_mappings = container['docker'].get('portMappings')\n return port_mappings", - "docstring": "Get the ``portMappings`` field for the app container." - }, - { - "code": "def change_password(self, newpassword):\n if not self.unlocked():\n raise WalletLocked\n self.password = newpassword\n self._save_encrypted_masterpassword()", - "docstring": "Change the password that allows to decrypt the master key" - }, - { - "code": "def save_act(self, path=None):\n if path is None:\n path = os.path.join(logger.get_dir(), \"model.pkl\")\n with tempfile.TemporaryDirectory() as td:\n save_variables(os.path.join(td, \"model\"))\n arc_name = os.path.join(td, \"packed.zip\")\n with zipfile.ZipFile(arc_name, 'w') as zipf:\n for root, dirs, files in os.walk(td):\n for fname in files:\n file_path = os.path.join(root, fname)\n if file_path != arc_name:\n zipf.write(file_path, os.path.relpath(file_path, td))\n with open(arc_name, \"rb\") as f:\n model_data = f.read()\n with open(path, \"wb\") as f:\n cloudpickle.dump((model_data, self._act_params), f)", - "docstring": "Save model to a pickle located at `path`" - }, - { - "code": "def _get_checked_path(path, config, must_exist=True, allow_none=True):\n if path in (None, \"\"):\n if allow_none:\n return None\n raise ValueError(\"Invalid path {!r}\".format(path))\n config_file = config.get(\"_config_file\")\n if config_file and not os.path.isabs(path):\n path = os.path.normpath(os.path.join(os.path.dirname(config_file), path))\n else:\n path = os.path.abspath(path)\n if must_exist and not os.path.exists(path):\n raise ValueError(\"Invalid path {!r}\".format(path))\n return path", - "docstring": "Convert path to absolute if not None." - }, - { - "code": "def unlock(self, pwd):\n if self.store.is_encrypted():\n return self.store.unlock(pwd)", - "docstring": "Unlock the wallet database" - }, - { - "code": "def FindNextMultiLineCommentStart(lines, lineix):\n while lineix < len(lines):\n if lines[lineix].strip().startswith('/*'):\n if lines[lineix].strip().find('*/', 2) < 0:\n return lineix\n lineix += 1\n return len(lines)", - "docstring": "Find the beginning marker for a multiline comment." 
- }, - { - "code": "def eval(self, expression, use_compilation_plan=False):\n code = 'PyJsEvalResult = eval(%s)' % json.dumps(expression)\n self.execute(code, use_compilation_plan=use_compilation_plan)\n return self['PyJsEvalResult']", - "docstring": "evaluates expression in current context and returns its value" - }, - { - "code": "def dropbox_factory(request):\n try:\n return request.registry.settings['dropbox_container'].get_dropbox(request.matchdict['drop_id'])\n except KeyError:\n raise HTTPNotFound('no such dropbox')", - "docstring": "expects the id of an existing dropbox and returns its instance" - }, - { - "code": "def escape(pattern):\n \"Escape all non-alphanumeric characters in pattern.\"\n s = list(pattern)\n alphanum = _alphanum\n for i, c in enumerate(pattern):\n if c not in alphanum:\n if c == \"\\000\":\n s[i] = \"\\\\000\"\n else:\n s[i] = \"\\\\\" + c\n return pattern[:0].join(s)", - "docstring": "Escape all non-alphanumeric characters in pattern." - }, - { - "code": "def _separate(self, kwargs):\n self._pop_none(kwargs)\n result = {}\n for field in Resource.config_fields:\n if field in kwargs:\n result[field] = kwargs.pop(field)\n if field in Resource.json_fields:\n if not isinstance(result[field], six.string_types):\n continue\n try:\n data = json.loads(result[field])\n result[field] = data\n except ValueError:\n raise exc.TowerCLIError('Provided json file format '\n 'invalid. Please recheck.')\n return result", - "docstring": "Remove None-valued and configuration-related keyworded arguments" - }, - { - "code": "def _at_block_start(tc, line):\n if tc.atBlockStart():\n return True\n column = tc.columnNumber()\n indentation = len(line) - len(line.lstrip())\n return column <= indentation", - "docstring": "Improve QTextCursor.atBlockStart to ignore spaces" - }, - { - "code": "def create_acl(self, name):\n if name in self._acl:\n return False\n self._acl[name] = {\n 'allow': [],\n 'deny': []\n }\n return True", - "docstring": "Create a new acl." - }, - { - "code": "def _patch(self, route, data, headers=None, failure_message=None):\n headers = self._get_headers(headers)\n response_lambda = (\n lambda: requests.patch(\n self._get_qualified_route(route), headers=headers, data=data, verify=False, proxies=self.proxies\n )\n )\n response = check_for_rate_limiting(response_lambda(), response_lambda)\n return self._handle_response(response, failure_message)", - "docstring": "Execute a patch request and return the result" - }, - { - "code": "def removeLogbook(self, menu=None):\n if self.logMenuCount > 1 and menu is not None:\n menu.removeMenu()\n self.logMenus.remove(menu)\n self.logMenuCount -= 1", - "docstring": "Remove logbook menu set." - }, - { - "code": "def expand_args(command):\n if isinstance(command, (str, unicode)):\n splitter = shlex.shlex(command.encode('utf-8'))\n splitter.whitespace = '|'\n splitter.whitespace_split = True\n command = []\n while True:\n token = splitter.get_token()\n if token:\n command.append(token)\n else:\n break\n command = list(map(shlex.split, command))\n return command", - "docstring": "Parses command strings and returns a Popen-ready list." - }, - { - "code": "async def set_active_client(self, set_active_client_request):\n response = hangouts_pb2.SetActiveClientResponse()\n await self._pb_request('clients/setactiveclient',\n set_active_client_request, response)\n return response", - "docstring": "Set the active client." 
- }, - { - "code": "def from_spec(spec, kwargs):\n env = tensorforce.util.get_object(\n obj=spec,\n predefined_objects=tensorforce.environments.environments,\n kwargs=kwargs\n )\n assert isinstance(env, Environment)\n return env", - "docstring": "Creates an environment from a specification dict." - }, - { - "code": "def lint(context):\n config = context.obj\n try:\n run('flake8 {dir} --exclude={exclude}'.format(\n dir=config['CWD'],\n exclude=','.join(EXCLUDE),\n ))\n except SubprocessError:\n context.exit(1)", - "docstring": "Looks for errors in source code of your blog" - }, - { - "code": "def apply_changesets(args, changesets, catalog):\n tmpdir = tempfile.mkdtemp()\n tmp_patch = join(tmpdir, \"tmp.patch\")\n tmp_lcat = join(tmpdir, \"tmp.lcat\")\n for node in changesets:\n remove(tmp_patch)\n copy(node.mfile['changeset']['filename'], tmp_patch)\n logging.info(\"mv %s %s\"%(catalog, tmp_lcat))\n shutil.move(catalog, tmp_lcat)\n cmd = args.patch_cmd.replace(\"$in1\", tmp_lcat)\\\n .replace(\"$patch\", tmp_patch)\\\n .replace(\"$out\", catalog)\n logging.info(\"Patch: %s\"%cmd)\n subprocess.check_call(cmd, shell=True)\n shutil.rmtree(tmpdir, ignore_errors=True)", - "docstring": "Apply to the 'catalog' the changesets in the metafile list 'changesets" - }, - { - "code": "def join(prev, sep, *args, **kw):\n yield sep.join(prev, *args, **kw)", - "docstring": "alias of str.join" - }, - { - "code": "def copy_file(self, path, prefixed_path, source_storage):\n if prefixed_path in self.copied_files:\n return self.log(\"Skipping '%s' (already copied earlier)\" % path)\n if not self.delete_file(path, prefixed_path, source_storage):\n return\n source_path = source_storage.path(path)\n if self.dry_run:\n self.log(\"Pretending to copy '%s'\" % source_path, level=1)\n else:\n self.log(\"Copying '%s'\" % source_path, level=1)\n with source_storage.open(path) as source_file:\n self.storage.save(prefixed_path, source_file)\n self.copied_files.append(prefixed_path)", - "docstring": "Attempt to copy ``path`` with storage" - }, - { - "code": "def sys_fsync(self, fd):\n ret = 0\n try:\n self.files[fd].sync()\n except IndexError:\n ret = -errno.EBADF\n except FdError:\n ret = -errno.EINVAL\n return ret", - "docstring": "Synchronize a file's in-core state with that on disk." - }, - { - "code": "def handle_starttag(self, tag, attrs):\n if tag in self.mathml_elements:\n final_attr = \"\"\n for key, value in attrs:\n final_attr += ' {0}=\"{1}\"'.format(key, value)\n self.fed.append(\"<{0}{1}>\".format(tag, final_attr))", - "docstring": "Return representation of html start tag and attributes." - }, - { - "code": "def _get_params(target, param, dof):\n return [target.getParam(getattr(ode, 'Param{}{}'.format(param, s)))\n for s in ['', '2', '3'][:dof]]", - "docstring": "Get the given param from each of the DOFs for a joint." - }, - { - "code": "def yaml_config_reader(config_path):\n if not config_path.endswith(\".yaml\"):\n raise ValueError(\"Config file not yaml\")\n with open(config_path, 'r') as f:\n config = yaml.load(f)\n return config", - "docstring": "Reads yaml config file and returns auto-typed config_dict" - }, - { - "code": "def outputs_are_present(outputs):\n for o in outputs:\n if not o.value:\n continue\n if o.recursive:\n if not folder_exists(o.value):\n return False\n else:\n if not simple_pattern_exists_in_gcs(o.value):\n return False\n return True", - "docstring": "True if each output contains at least one file or no output specified." 
- }, - { - "code": "def _check_filters(song, include_filters=None, exclude_filters=None, all_includes=False, all_excludes=False):\n\tinclude = True\n\tif include_filters:\n\t\tif all_includes:\n\t\t\tif not all(field in song and _check_field_value(song[field], pattern) for field, pattern in include_filters):\n\t\t\t\tinclude = False\n\t\telse:\n\t\t\tif not any(field in song and _check_field_value(song[field], pattern) for field, pattern in include_filters):\n\t\t\t\tinclude = False\n\tif exclude_filters:\n\t\tif all_excludes:\n\t\t\tif all(field in song and _check_field_value(song[field], pattern) for field, pattern in exclude_filters):\n\t\t\t\tinclude = False\n\t\telse:\n\t\t\tif any(field in song and _check_field_value(song[field], pattern) for field, pattern in exclude_filters):\n\t\t\t\tinclude = False\n\treturn include", - "docstring": "Check a song metadata dict against a set of metadata filters." - }, - { - "code": "def _hyphens_to_dashes(self):\n problematic_hyphens = [(r'-([.,!)])', r'---\\1'),\n (r'(?<=\\d)-(?=\\d)', '--'),\n (r'(?<=\\s)-(?=\\s)', '---')]\n for problem_case in problematic_hyphens:\n self._regex_replacement(*problem_case)", - "docstring": "Transform hyphens to various kinds of dashes" - }, - { - "code": "def setup_actions(self):\n self.actionOpen.triggered.connect(self.on_open)\n self.actionNew.triggered.connect(self.on_new)\n self.actionSave.triggered.connect(self.on_save)\n self.actionSave_as.triggered.connect(self.on_save_as)\n self.actionQuit.triggered.connect(\n QtWidgets.QApplication.instance().quit)\n self.tabWidget.current_changed.connect(self.on_current_tab_changed)\n self.tabWidget.last_tab_closed.connect(self.on_last_tab_closed)\n self.actionAbout.triggered.connect(self.on_about)\n self.actionRun.triggered.connect(self.on_run)\n self.interactiveConsole.process_finished.connect(\n self.on_process_finished)\n self.actionConfigure_run.triggered.connect(self.on_configure_run)", - "docstring": "Connects slots to signals" - }, - { - "code": "def wait(timeout=300):\n if env():\n cij.err(\"cij.ssh.wait: Invalid SSH environment\")\n return 1\n timeout_backup = cij.ENV.get(\"SSH_CMD_TIMEOUT\")\n try:\n time_start = time.time()\n cij.ENV[\"SSH_CMD_TIMEOUT\"] = \"3\"\n while True:\n time_current = time.time()\n if (time_current - time_start) > timeout:\n cij.err(\"cij.ssh.wait: Timeout\")\n return 1\n status, _, _ = command([\"exit\"], shell=True, echo=False)\n if not status:\n break\n cij.info(\"cij.ssh.wait: Time elapsed: %d seconds\" % (time_current - time_start))\n finally:\n if timeout_backup is None:\n del cij.ENV[\"SSH_CMD_TIMEOUT\"]\n else:\n cij.ENV[\"SSH_CMD_TIMEOUT\"] = timeout_backup\n return 0", - "docstring": "Wait util target connected" - }, - { - "code": "def _resolve_srv(self):\n resolver = self.settings[\"dns_resolver\"]\n self._set_state(\"resolving-srv\")\n self.event(ResolvingSRVEvent(self._dst_name, self._dst_service))\n resolver.resolve_srv(self._dst_name, self._dst_service, \"tcp\",\n callback = self._got_srv)", - "docstring": "Start resolving the SRV record." 
- }, - { - "code": "def environment_name_for_cname(self, env_cname):\n envs = self.get_environments()\n for env in envs:\n if env['Status'] != 'Terminated' \\\n and 'CNAME' in env \\\n and env['CNAME'] \\\n and env['CNAME'].lower().startswith(env_cname.lower() + '.'):\n return env['EnvironmentName']\n return None", - "docstring": "Returns an environment name for the given cname" - }, - { - "code": "def swatch(self, x, y, w=35, h=35, padding=0, roundness=0):\n for clr in self:\n clr.swatch(x, y, w, h, roundness)\n y += h + padding", - "docstring": "Rectangle swatches for all the colors in the list." - }, - { - "code": "def pkill():\n if env():\n return 1\n cmd = [\"ps -aux | grep fio | grep -v grep\"]\n status, _, _ = cij.ssh.command(cmd, shell=True, echo=False)\n if not status:\n status, _, _ = cij.ssh.command([\"pkill -f fio\"], shell=True)\n if status:\n return 1\n return 0", - "docstring": "Kill all of FIO processes" - }, - { - "code": "def contains(self, i):\n return self.start <= i.start and i.end <= self.end", - "docstring": "Returns true iff this interval contains the interval i" - }, - { - "code": "def Newline(loc=None):\n @llrule(loc, lambda parser: [\"newline\"])\n def rule(parser):\n result = parser._accept(\"newline\")\n if result is unmatched:\n return result\n return []\n return rule", - "docstring": "A rule that accepts token of kind ``newline`` and returns an empty list." - }, - { - "code": "def add_control_number(self, tag, value):\n record_add_field(self.record,\n tag,\n controlfield_value=value)", - "docstring": "Add a control-number 00x for given tag with value." - }, - { - "code": "def _get_digest(self):\n return hmac.new(\n self._secret, request.data, hashlib.sha1).hexdigest() if self._secret else None", - "docstring": "Return message digest if a secret key was provided" - }, - { - "code": "def list_move_to_front(l,value='other'):\n l=list(l)\n if value in l:\n l.remove(value)\n l.insert(0,value)\n return l", - "docstring": "if the value is in the list, move it to the front and return it." - }, - { - "code": "def round_teff_luminosity(cluster):\n temps = [round(t, -1) for t in teff(cluster)]\n lums = [round(l, 3) for l in luminosity(cluster)]\n return temps, lums", - "docstring": "Returns rounded teff and luminosity lists." - }, - { - "code": "def reverse(self):\n effect_args = ['reverse']\n self.effects.extend(effect_args)\n self.effects_log.append('reverse')\n return self", - "docstring": "Reverse the audio completely" - }, - { - "code": "def Match(pattern, s):\n if pattern not in _regexp_compile_cache:\n _regexp_compile_cache[pattern] = sre_compile.compile(pattern)\n return _regexp_compile_cache[pattern].match(s)", - "docstring": "Matches the string with the pattern, caching the compiled regexp." - }, - { - "code": "def consume_line(self, line):\n data = RE_VALUE_KEY.split(line.strip(), 1)\n if len(data) == 1:\n return float(data[0]), None\n else:\n return float(data[0]), data[1].strip()", - "docstring": "Consume data from a line." - }, - { - "code": "def variants_of(\n graph: BELGraph,\n node: Protein,\n modifications: Optional[Set[str]] = None,\n) -> Set[Protein]:\n if modifications:\n return _get_filtered_variants_of(graph, node, modifications)\n return {\n v\n for u, v, key, data in graph.edges(keys=True, data=True)\n if (\n u == node\n and data[RELATION] == HAS_VARIANT\n and pybel.struct.has_protein_modification(v)\n )\n }", - "docstring": "Returns all variants of the given node." 
- }, - { - "code": "def prohibit(self, data):\n for char in data:\n for lookup in self.prohibited:\n if lookup(char):\n raise StringprepError(\"Prohibited character: {0!r}\"\n .format(char))\n return data", - "docstring": "Checks for prohibited characters." - }, - { - "code": "def MoveToCenter(self) -> bool:\n if self.IsTopLevel():\n rect = self.BoundingRectangle\n screenWidth, screenHeight = GetScreenSize()\n x, y = (screenWidth - rect.width()) // 2, (screenHeight - rect.height()) // 2\n if x < 0: x = 0\n if y < 0: y = 0\n return SetWindowPos(self.NativeWindowHandle, SWP.HWND_Top, x, y, 0, 0, SWP.SWP_NoSize)\n return False", - "docstring": "Move window to screen center." - }, - { - "code": "def check_completion(self):\n terminate = False\n term_dict = self.get_termination_stats(get_cos=self.costol is not None)\n terminate |= np.all(np.abs(term_dict['delta_vals']) < self.paramtol)\n terminate |= (term_dict['delta_err'] < self.errtol)\n terminate |= (term_dict['exp_err'] < self.exptol)\n terminate |= (term_dict['frac_err'] < self.fractol)\n if self.costol is not None:\n terminate |= (curcos < term_dict['model_cosine'])\n return terminate", - "docstring": "Returns a Bool of whether the algorithm has found a satisfactory minimum" - }, - { - "code": "def generate():\n data_bytes = bytearray(random.getrandbits(8) for i in range(REQID.REQID_SIZE))\n return REQID(data_bytes)", - "docstring": "Generates a random REQID for request" - }, - { - "code": "def pwd_phasebin(phases, mags, binsize=0.002, minbin=9):\n bins = np.arange(0.0, 1.0, binsize)\n binnedphaseinds = npdigitize(phases, bins)\n binnedphases, binnedmags = [], []\n for x in npunique(binnedphaseinds):\n thisbin_inds = binnedphaseinds == x\n thisbin_phases = phases[thisbin_inds]\n thisbin_mags = mags[thisbin_inds]\n if thisbin_inds.size > minbin:\n binnedphases.append(npmedian(thisbin_phases))\n binnedmags.append(npmedian(thisbin_mags))\n return np.array(binnedphases), np.array(binnedmags)", - "docstring": "This bins the phased mag series using the given binsize." - }, - { - "code": "def prettify(self, elem):\n from xml.etree import ElementTree\n from re import sub\n rawString = ElementTree.tostring(elem, 'utf-8')\n parsedString = sub(r'(?=<[^/].*>)', '\\n', rawString)\n return parsedString[1:]", - "docstring": "Parse xml elements for pretty printing" - }, - { - "code": "def _decode_error(self):\n error_qname = self._ns_prefix + \"error\"\n for child in self._element:\n if child.tag == error_qname:\n self._error = StanzaErrorElement(child)\n return\n raise BadRequestProtocolError(\"Error element missing in\"\n \" an error stanza\")", - "docstring": "Decode error element of the stanza." - }, - { - "code": "def CheckForCopyright(filename, lines, error):\n for line in range(1, min(len(lines), 11)):\n if re.search(r'Copyright', lines[line], re.I): break\n else:\n error(filename, 0, 'legal/copyright', 5,\n 'No copyright message found. '\n 'You should have a line: \"Copyright [year] \"')", - "docstring": "Logs an error if no Copyright message appears at the top of the file." - }, - { - "code": "def figure(self,forceNew=False):\n if plt._pylab_helpers.Gcf.get_num_fig_managers()>0 and forceNew is False:\n self.log.debug(\"figure already seen, not creating one.\")\n return\n if self.subplot:\n self.log.debug(\"subplot mode enabled, not creating new figure\")\n else:\n self.log.debug(\"creating new figure\")\n plt.figure(figsize=(self.figure_width,self.figure_height))", - "docstring": "make sure a figure is ready." 
- }, - { - "code": "def _glob_match(self, pattern, string):\n return bool(re.match(fnmatch.translate(pattern), string,\n re.M | re.U | re.L))", - "docstring": "Match given string, by escaping regex characters" - }, - { - "code": "def error(msg):\n _flush()\n sys.stderr.write(\"\\033[1;37;41mERROR: {}\\033[0m\\n\".format(msg))\n sys.stderr.flush()", - "docstring": "Emit an error message to stderr." - }, - { - "code": "def SetActive(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:\n if self.IsTopLevel():\n handle = self.NativeWindowHandle\n if IsIconic(handle):\n ret = ShowWindow(handle, SW.Restore)\n elif not IsWindowVisible(handle):\n ret = ShowWindow(handle, SW.Show)\n ret = SetForegroundWindow(handle)\n time.sleep(waitTime)\n return ret\n return False", - "docstring": "Set top level window active." - }, - { - "code": "def settings(self):\n for table in self.tables:\n if isinstance(table, SettingTable):\n for statement in table.statements:\n yield statement", - "docstring": "Generator which returns all of the statements in all of the settings tables" - }, - { - "code": "def _delocalize_outputs_commands(self, task_dir, outputs, user_project):\n commands = []\n for o in outputs:\n if o.recursive or not o.value:\n continue\n dest_path = o.uri.path\n local_path = task_dir + '/' + _DATA_SUBDIR + '/' + o.docker_path\n if o.file_provider == job_model.P_LOCAL:\n commands.append('mkdir -p \"%s\"' % dest_path)\n if o.file_provider in [job_model.P_LOCAL, job_model.P_GCS]:\n if user_project:\n command = 'gsutil -u %s -mq cp \"%s\" \"%s\"' % (user_project, local_path,\n dest_path)\n else:\n command = 'gsutil -mq cp \"%s\" \"%s\"' % (local_path, dest_path)\n commands.append(command)\n return '\\n'.join(commands)", - "docstring": "Copy outputs from local disk to GCS." - }, - { - "code": "def _hook_callback(self, state, pc, instruction):\n 'Invoke all registered generic hooks'\n if issymbolic(pc):\n return\n for cb in self._hooks.get(pc, []):\n cb(state)\n for cb in self._hooks.get(None, []):\n cb(state)", - "docstring": "Invoke all registered generic hooks" - }, - { - "code": "def send_command(\n self, command, callback=True, command_type=QRTPacketType.PacketCommand\n ):\n if self.transport is not None:\n cmd_length = len(command)\n LOG.debug(\"S: %s\", command)\n self.transport.write(\n struct.pack(\n RTCommand % cmd_length,\n RTheader.size + cmd_length + 1,\n command_type.value,\n command.encode(),\n b\"\\0\",\n )\n )\n future = self.loop.create_future()\n if callback:\n self.request_queue.append(future)\n else:\n future.set_result(None)\n return future\n raise QRTCommandException(\"Not connected!\")", - "docstring": "Sends commands to QTM" - }, - { - "code": "def handle_map_doc(self, document):\n for function in sorted(self.functions.values(), key=lambda x: x[0]):\n try:\n yield [list(function(document))]\n except Exception, exc:\n yield []\n self.log(repr(exc))", - "docstring": "Return the mapping of a document according to the function list." 
- }, - { - "code": "def in_out_check(self):\r\n devices = available_devices()\r\n if not self.in_idx in devices:\r\n raise OSError(\"Input device is unavailable\")\r\n in_check = devices[self.in_idx]\r\n if not self.out_idx in devices:\r\n raise OSError(\"Output device is unavailable\")\r\n out_check = devices[self.out_idx]\r\n if((in_check['inputs'] == 0) and (out_check['outputs']==0)):\r\n raise StandardError('Invalid input and output devices')\r\n elif(in_check['inputs'] == 0):\r\n raise ValueError('Selected input device has no inputs')\r\n elif(out_check['outputs'] == 0):\r\n raise ValueError('Selected output device has no outputs')\r\n return True", - "docstring": "Checks the input and output to see if they are valid" - }, - { - "code": "def _set_tzdata(self, tzobj):\n for attr in _tzfile.attrs:\n setattr(self, '_' + attr, getattr(tzobj, attr))", - "docstring": "Set the time zone data of this object from a _tzfile object" - }, - { - "code": "def file_download_event_builder(event, sender_app, obj=None, **kwargs):\n event.update(dict(\n timestamp=datetime.datetime.utcnow().isoformat(),\n bucket_id=str(obj.bucket_id),\n file_id=str(obj.file_id),\n file_key=obj.key,\n size=obj.file.size,\n referrer=request.referrer,\n **get_user()\n ))\n return event", - "docstring": "Build a file-download event." - }, - { - "code": "def _clip(sid, prefix):\n return sid[len(prefix):] if sid.startswith(prefix) else sid", - "docstring": "Clips a prefix from the beginning of a string if it exists." - }, - { - "code": "async def copy_storage_object(self, source_bucket, source_key,\n bucket, key):\n info = await self.head_object(Bucket=source_bucket, Key=source_key)\n size = info['ContentLength']\n if size > MULTI_PART_SIZE:\n result = await _multipart_copy(self, source_bucket, source_key,\n bucket, key, size)\n else:\n result = await self.copy_object(\n Bucket=bucket, Key=key,\n CopySource=_source_string(source_bucket, source_key)\n )\n return result", - "docstring": "Copy a file from one bucket into another" - }, - { - "code": "def stats(self):\n nameordered = self.samples.keys()\n nameordered.sort()\n pd.options.display.max_rows = len(self.samples)\n statdat = pd.DataFrame([self.samples[i].stats for i in nameordered],\n index=nameordered).dropna(axis=1, how='all')\n for column in statdat:\n if column not in [\"hetero_est\", \"error_est\"]:\n statdat[column] = np.nan_to_num(statdat[column]).astype(int)\n return statdat", - "docstring": "Returns a data frame with Sample data and state." - }, - { - "code": "def prune(self, depth=0):\n for n in list(self.nodes):\n if len(n.links) <= depth:\n self.remove_node(n.id)", - "docstring": "Removes all nodes with less or equal links than depth." 
- }, - { - "code": "def submitEntry(self):\n mcclogs, physlogs = self.selectedLogs()\n success = True\n if mcclogs != []:\n if not self.acceptedUser(\"MCC\"):\n QMessageBox().warning(self, \"Invalid User\", \"Please enter a valid user name!\")\n return\n fileName = self.xmlSetup(\"MCC\", mcclogs)\n if fileName is None:\n return\n if not self.imagePixmap.isNull():\n self.prepareImages(fileName, \"MCC\")\n success = self.sendToLogbook(fileName, \"MCC\")\n if physlogs != []:\n for i in range(len(physlogs)):\n fileName = self.xmlSetup(\"Physics\", physlogs[i])\n if fileName is None:\n return\n if not self.imagePixmap.isNull():\n self.prepareImages(fileName, \"Physics\")\n success_phys = self.sendToLogbook(fileName, \"Physics\", physlogs[i])\n success = success and success_phys\n self.done(success)", - "docstring": "Process user inputs and subit logbook entry when user clicks Submit button" - }, - { - "code": "def logger(name=None, save=False):\n logger = logging.getLogger(name)\n if save:\n logformat = '%(asctime)s [%(levelname)s] [%(name)s] %(funcName)s: %(message)s (line %(lineno)d)'\n log_file_path = 'fut.log'\n open(log_file_path, 'w').write('')\n logger.setLevel(logging.DEBUG)\n logger_handler = logging.FileHandler(log_file_path)\n logger_handler.setFormatter(logging.Formatter(logformat))\n else:\n logger_handler = NullHandler()\n logger.addHandler(logger_handler)\n return logger", - "docstring": "Init and configure logger." - }, - { - "code": "def _parse_info(self, info_field):\n info = dict()\n for item in info_field.split(';'):\n info_item_data = item.split('=')\n if len(info_item_data) == 1:\n info[info_item_data[0]] = True\n elif len(info_item_data) == 2:\n info[info_item_data[0]] = info_item_data[1]\n return info", - "docstring": "Parse the VCF info field" - }, - { - "code": "def flush(self, line):\n sys.stdout.write(line)\n sys.stdout.flush()", - "docstring": "flush the line to stdout" - }, - { - "code": "def fix_remaining_type_comments(node):\n assert node.type == syms.file_input\n last_n = None\n for n in node.post_order():\n if last_n is not None:\n if n.type == token.NEWLINE and is_assignment(last_n):\n fix_variable_annotation_type_comment(n, last_n)\n elif n.type == syms.funcdef and last_n.type == syms.suite:\n fix_signature_annotation_type_comment(n, last_n, offset=1)\n elif n.type == syms.async_funcdef and last_n.type == syms.suite:\n fix_signature_annotation_type_comment(n, last_n, offset=2)\n last_n = n", - "docstring": "Converts type comments in `node` to proper annotated assignments." - }, - { - "code": "def economic_qs_zeros(n):\n Q0 = empty((n, 0))\n Q1 = eye(n)\n S0 = empty(0)\n return ((Q0, Q1), S0)", - "docstring": "Eigen decomposition of a zero matrix." - }, - { - "code": "def _update_type(self, params):\n dozscale = False\n particles = []\n for p in listify(params):\n typ, ind = self._p2i(p)\n particles.append(ind)\n dozscale = dozscale or typ == 'zscale'\n particles = set(particles)\n return dozscale, particles", - "docstring": "Returns dozscale and particle list of update" - }, - { - "code": "def getIDsFromFiles(files):\n if type(files) is str:\n files=glob.glob(files+\"/*.*\")\n IDs=[]\n for fname in files:\n if fname[-4:].lower()=='.abf':\n ext=fname.split('.')[-1]\n IDs.append(os.path.basename(fname).replace('.'+ext,''))\n return sorted(IDs)", - "docstring": "given a path or list of files, return ABF IDs." 
- }, - { - "code": "def registerDriver(iface, driver, class_implements=[]):\n for class_item in class_implements:\n classImplements(class_item, iface)\n component.provideAdapter(factory=driver, adapts=[iface], provides=IDriver)", - "docstring": "Register driver adapter used by page object" - }, - { - "code": "def _parse_geoms(self, **kwargs):\n bbox = kwargs.get('bbox', None)\n wkt_geom = kwargs.get('wkt', None)\n geojson = kwargs.get('geojson', None)\n if bbox is not None:\n g = box(*bbox)\n elif wkt_geom is not None:\n g = wkt.loads(wkt_geom)\n elif geojson is not None:\n g = shape(geojson)\n else:\n return None\n if self.proj is None:\n return g\n else:\n return self._reproject(g, from_proj=kwargs.get('from_proj', 'EPSG:4326'))", - "docstring": "Finds supported geometry types, parses them and returns the bbox" - }, - { - "code": "def check_output(self, make_ndx_output, message=None, err=None):\n if message is None:\n message = \"\"\n else:\n message = '\\n' + message\n def format(output, w=60):\n hrule = \"====[ GromacsError (diagnostic output) ]\".ljust(w,\"=\")\n return hrule + '\\n' + str(output) + hrule\n rc = True\n if self._is_empty_group(make_ndx_output):\n warnings.warn(\"Selection produced empty group.{message!s}\".format(**vars()), category=GromacsValueWarning)\n rc = False\n if self._has_syntax_error(make_ndx_output):\n rc = False\n out_formatted = format(make_ndx_output)\n raise GromacsError(\"make_ndx encountered a Syntax Error, \"\n \"%(message)s\\noutput:\\n%(out_formatted)s\" % vars())\n if make_ndx_output.strip() == \"\":\n rc = False\n out_formatted = format(err)\n raise GromacsError(\"make_ndx produced no output, \"\n \"%(message)s\\nerror output:\\n%(out_formatted)s\" % vars())\n return rc", - "docstring": "Simple tests to flag problems with a ``make_ndx`` run." - }, - { - "code": "async def play_now(self, requester: int, track: dict):\r\n self.add_next(requester, track)\r\n await self.play(ignore_shuffle=True)", - "docstring": "Add track and play it." - }, - { - "code": "def assign(self, var, val, assignment):\n \"Assign var, and keep track of conflicts.\"\n oldval = assignment.get(var, None)\n if val != oldval:\n if oldval is not None:\n self.record_conflict(assignment, var, oldval, -1)\n self.record_conflict(assignment, var, val, +1)\n CSP.assign(self, var, val, assignment)", - "docstring": "Assign var, and keep track of conflicts." 
- }, - { - "code": "def _set_debug_dict(__loglevel__):\n _lconfig.dictConfig({\n 'version': 1,\n 'disable_existing_loggers': False,\n 'formatters': {\n 'standard': {\n 'format': \"%(asctime)s \\t\"\\\n +\"pid=%(process)d \\t\"\\\n +\"[%(filename)s]\\t\"\\\n +\"%(levelname)s \\t\"\\\n +\"%(message)s\"\n },\n },\n 'handlers': {\n __name__: {\n 'level':__loglevel__,\n 'class':'logging.FileHandler',\n 'filename':__debugfile__,\n 'formatter':\"standard\",\n 'mode':'a+'\n }\n },\n 'loggers':{\n __name__: {\n 'handlers': [__name__],\n 'level': __loglevel__,\n 'propogate': True\n }\n }\n })", - "docstring": "set the debug dict" - }, - { - "code": "def getDescription(self):\n description = {'name':self.name, 'fields':[f.name for f in self.fields], \\\n 'numRecords by field':[f.numRecords for f in self.fields]}\n return description", - "docstring": "Returns a description of the dataset" - }, - { - "code": "def trun_emph(trun):\n if trun[\"conf\"][\"VERBOSE\"] > 1:\n cij.emph(\"rnr:CONF {\")\n for cvar in sorted(trun[\"conf\"].keys()):\n cij.emph(\" % 16s: %r\" % (cvar, trun[\"conf\"][cvar]))\n cij.emph(\"}\")\n if trun[\"conf\"][\"VERBOSE\"]:\n cij.emph(\"rnr:INFO {\")\n cij.emph(\" OUTPUT: %r\" % trun[\"conf\"][\"OUTPUT\"])\n cij.emph(\" yml_fpath: %r\" % yml_fpath(trun[\"conf\"][\"OUTPUT\"]))\n cij.emph(\"}\")", - "docstring": "Print essential info on" - }, - { - "code": "def extender(self, edge):\n \"See what edges can be extended by this edge.\"\n (j, k, B, _, _) = edge\n for (i, j, A, alpha, B1b) in self.chart[j]:\n if B1b and B == B1b[0]:\n self.add_edge([i, k, A, alpha + [edge], B1b[1:]])", - "docstring": "See what edges can be extended by this edge." - }, - { - "code": "def _countmatrix(lxs):\n share = np.zeros((lxs.shape[0], lxs.shape[0]))\n names = range(lxs.shape[0])\n for row in lxs:\n for samp1, samp2 in itertools.combinations(names, 2):\n shared = lxs[samp1, lxs[samp2] > 0].sum()\n share[samp1, samp2] = shared\n for row in xrange(len(names)):\n share[row, row] = lxs[row].sum()\n return share", - "docstring": "fill a matrix with pairwise data sharing" - }, - { - "code": "def parse_filename(self, filepath):\n name = os.path.basename(filepath)[:-src_ext_len]\n try:\n dt = datetime.strptime(name, \"%Y-%m-%d-%H-%M\")\n except ValueError:\n raise PostNameInvalid\n return {'name': name, 'datetime': dt, 'filepath': filepath}", - "docstring": "parse post source files name to datetime object" - }, - { - "code": "def _addPartitionId(self, index, partitionId=None):\n if partitionId is None:\n self._partitionIdList.append(numpy.inf)\n else:\n self._partitionIdList.append(partitionId)\n indices = self._partitionIdMap.get(partitionId, [])\n indices.append(index)\n self._partitionIdMap[partitionId] = indices", - "docstring": "Adds partition id for pattern index" - }, - { - "code": "def _dot_to_dec(ip, check=True):\n if check and not is_dot(ip):\n raise ValueError('_dot_to_dec: invalid IP: \"%s\"' % ip)\n octets = str(ip).split('.')\n dec = 0\n dec |= int(octets[0]) << 24\n dec |= int(octets[1]) << 16\n dec |= int(octets[2]) << 8\n dec |= int(octets[3])\n return dec", - "docstring": "Dotted decimal notation to decimal conversion." 
- }, - { - "code": "def figure_sweeps(self, offsetX=0, offsetY=0):\n self.log.debug(\"creating overlayed sweeps plot\")\n self.figure()\n for sweep in range(self.abf.sweeps):\n self.abf.setsweep(sweep)\n self.setColorBySweep()\n plt.plot(self.abf.sweepX2+sweep*offsetX,\n self.abf.sweepY+sweep*offsetY,\n **self.kwargs)\n if offsetX:\n self.marginX=.05\n self.decorate()", - "docstring": "plot every sweep of an ABF file." - }, - { - "code": "def start(self):\r\n super(JupyterTensorboardApp, self).start()\r\n subcmds = \", \".join(sorted(self.subcommands))\r\n sys.exit(\"Please supply at least one subcommand: %s\" % subcmds)", - "docstring": "Perform the App's actions as configured" - }, - { - "code": "def _getInputValue(self, obj, fieldName):\n if isinstance(obj, dict):\n if not fieldName in obj:\n knownFields = \", \".join(\n key for key in obj.keys() if not key.startswith(\"_\")\n )\n raise ValueError(\n \"Unknown field name '%s' in input record. Known fields are '%s'.\\n\"\n \"This could be because input headers are mislabeled, or because \"\n \"input data rows do not contain a value for '%s'.\" % (\n fieldName, knownFields, fieldName\n )\n )\n return obj[fieldName]\n else:\n return getattr(obj, fieldName)", - "docstring": "Gets the value of a given field from the input record" - }, - { - "code": "def locked_delete(self):\n query = {self.key_name: self.key_value}\n self.model_class.objects.filter(**query).delete()", - "docstring": "Delete Credentials from the datastore." - }, - { - "code": "def parse(filename):\n for event, elt in et.iterparse(filename, events= ('start', 'end', 'comment', 'pi'), huge_tree=True):\n if event == 'start':\n obj = _elt2obj(elt)\n obj['type'] = ENTER\n yield obj\n if elt.text:\n yield {'type': TEXT, 'text': elt.text}\n elif event == 'end':\n yield {'type': EXIT}\n if elt.tail:\n yield {'type': TEXT, 'text': elt.tail}\n elt.clear()\n elif event == 'comment':\n yield {'type': COMMENT, 'text': elt.text}\n elif event == 'pi':\n yield {'type': PI, 'text': elt.text}\n else:\n assert False, (event, elt)", - "docstring": "Parses file content into events stream" - }, - { - "code": "async def update_info(self, *_):\n query = gql(\n )\n res = await self._execute(query)\n if res is None:\n return\n errors = res.get(\"errors\", [])\n if errors:\n msg = errors[0].get(\"message\", \"failed to login\")\n _LOGGER.error(msg)\n raise InvalidLogin(msg)\n data = res.get(\"data\")\n if not data:\n return\n viewer = data.get(\"viewer\")\n if not viewer:\n return\n self._name = viewer.get(\"name\")\n homes = viewer.get(\"homes\", [])\n self._home_ids = []\n for _home in homes:\n home_id = _home.get(\"id\")\n self._all_home_ids += [home_id]\n subs = _home.get(\"subscriptions\")\n if subs:\n status = subs[0].get(\"status\", \"ended\").lower()\n if not home_id or status != \"running\":\n continue\n self._home_ids += [home_id]", - "docstring": "Update home info async." - }, - { - "code": "def mongo(daemon=False, port=20771):\n cmd = \"mongod --port {0}\".format(port)\n if daemon:\n cmd += \" --fork\"\n run(cmd)", - "docstring": "Run the mongod process." - }, - { - "code": "def _average(self):\n r, g, b, a = 0, 0, 0, 0\n for clr in self:\n r += clr.r\n g += clr.g\n b += clr.b\n a += clr.alpha\n r /= len(self)\n g /= len(self)\n b /= len(self)\n a /= len(self)\n return color(r, g, b, a, mode=\"rgb\")", - "docstring": "Returns one average color for the colors in the list." 
- }, - { - "code": "def print_layers(self):\n for i, layer in enumerate(self.all_layers):\n logging.info(\n \" layer {:3}: {:20} {:15} {}\".format(i, layer.name, str(layer.get_shape()), layer.dtype.name)\n )", - "docstring": "Print all info of layers in the network." - }, - { - "code": "def unregister_watch(self, uid):\n Log.info(\"Unregister a watch with uid: \" + str(uid))\n self.watches.pop(uid, None)", - "docstring": "Unregister the watch with the given UUID." - }, - { - "code": "def multi_segment(annotation, sr=22050, length=None, **kwargs):\n PENT = [1, 32./27, 4./3, 3./2, 16./9]\n DURATION = 0.1\n h_int, _ = hierarchy_flatten(annotation)\n if length is None:\n length = int(sr * (max(np.max(_) for _ in h_int) + 1. / DURATION) + 1)\n y = 0.0\n for ints, (oc, scale) in zip(h_int, product(range(3, 3 + len(h_int)),\n PENT)):\n click = mkclick(440.0 * scale * oc, sr=sr, duration=DURATION)\n y = y + filter_kwargs(mir_eval.sonify.clicks,\n np.unique(ints),\n fs=sr, length=length,\n click=click)\n return y", - "docstring": "Sonify multi-level segmentations" - }, - { - "code": "def write(self, data, sections=None):\n if self.error[0]:\n self.status = self.error[0]\n data = b(self.error[1])\n if not self.headers_sent:\n self.send_headers(data, sections)\n if self.request_method != 'HEAD':\n try:\n if self.chunked:\n self.conn.sendall(b('%x\\r\\n%s\\r\\n' % (len(data), data)))\n else:\n self.conn.sendall(data)\n except socket.timeout:\n self.closeConnection = True\n except socket.error:\n self.closeConnection = True", - "docstring": "Write the data to the output socket." - }, - { - "code": "def invalidate_cache(cpu, address, size):\n cache = cpu.instruction_cache\n for offset in range(size):\n if address + offset in cache:\n del cache[address + offset]", - "docstring": "remove decoded instruction from instruction cache" - }, - { - "code": "def create_snippet(self, name, body, timeout=None):\n payload = {\n 'name': name,\n 'body': body\n }\n return self._api_request(\n self.SNIPPETS_ENDPOINT,\n self.HTTP_POST,\n payload=payload,\n timeout=timeout\n )", - "docstring": "API call to create a Snippet" - }, - { - "code": "def setup_stanza_handlers(self, handler_objects, usage_restriction):\n iq_handlers = {\"get\": {}, \"set\": {}}\n message_handlers = []\n presence_handlers = []\n for obj in handler_objects:\n if not isinstance(obj, XMPPFeatureHandler):\n continue\n obj.stanza_processor = self\n for dummy, handler in inspect.getmembers(obj, callable):\n if not hasattr(handler, \"_pyxmpp_stanza_handled\"):\n continue\n element_name, stanza_type = handler._pyxmpp_stanza_handled\n restr = handler._pyxmpp_usage_restriction\n if restr and restr != usage_restriction:\n continue\n if element_name == \"iq\":\n payload_class = handler._pyxmpp_payload_class_handled\n payload_key = handler._pyxmpp_payload_key\n if (payload_class, payload_key) in iq_handlers[stanza_type]:\n continue\n iq_handlers[stanza_type][(payload_class, payload_key)] = \\\n handler\n continue\n elif element_name == \"message\":\n handler_list = message_handlers\n elif element_name == \"presence\":\n handler_list = presence_handlers\n else:\n raise ValueError, \"Bad handler decoration\"\n handler_list.append(handler)\n with self.lock:\n self._iq_handlers = iq_handlers\n self._presence_handlers = presence_handlers\n self._message_handlers = message_handlers", - "docstring": "Install stanza handlers provided by `handler_objects`" - }, - { - "code": "def compare(buf_a, buf_b, ignore):\n for field in getattr(buf_a, '_fields_'):\n name, types = 
field[0], field[1]\n if name in ignore:\n continue\n val_a = getattr(buf_a, name)\n val_b = getattr(buf_b, name)\n if isinstance(types, (type(Union), type(Structure))):\n if compare(val_a, val_b, ignore):\n return 1\n elif isinstance(types, type(Array)):\n for i, _ in enumerate(val_a):\n if isinstance(types, (type(Union), type(Structure))):\n if compare(val_a[i], val_b[i], ignore):\n return 1\n else:\n if val_a[i] != val_b[i]:\n return 1\n else:\n if val_a != val_b:\n return 1\n return 0", - "docstring": "Compare of two Buffer item" - }, - { - "code": "def make_quaternion(theta, *axis):\n x, y, z = axis\n r = np.sqrt(x * x + y * y + z * z)\n st = np.sin(theta / 2.)\n ct = np.cos(theta / 2.)\n return [x * st / r, y * st / r, z * st / r, ct]", - "docstring": "Given an angle and an axis, create a quaternion." - }, - { - "code": "def _set_params(target, param, values, dof):\n if not isinstance(values, (list, tuple, np.ndarray)):\n values = [values] * dof\n assert dof == len(values)\n for s, value in zip(['', '2', '3'][:dof], values):\n target.setParam(getattr(ode, 'Param{}{}'.format(param, s)), value)", - "docstring": "Set the given param for each of the DOFs for a joint." - }, - { - "code": "def contact(request):\n form = ContactForm(request.POST or None)\n if form.is_valid():\n subject = form.cleaned_data['subject']\n message = form.cleaned_data['message']\n sender = form.cleaned_data['sender']\n cc_myself = form.cleaned_data['cc_myself']\n recipients = settings.CONTACTFORM_RECIPIENTS\n if cc_myself:\n recipients.append(sender)\n send_mail(getattr(settings, \"CONTACTFORM_SUBJECT_PREFIX\", '') + subject, message, sender, recipients)\n return render(request, 'contactform/thanks.html')\n return render( request, 'contactform/contact.html', {'form': form})", - "docstring": "Displays the contact form and sends the email" - }, - { - "code": "def Maximize(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:\n if self.IsTopLevel():\n return self.ShowWindow(SW.ShowMaximized, waitTime)\n return False", - "docstring": "Set top level window maximize." - }, - { - "code": "def round_arr_teff_luminosity(arr):\n arr['temp'] = np.around(arr['temp'], -1)\n arr['lum'] = np.around(arr['lum'], 3)\n return arr", - "docstring": "Return the numpy array with rounded teff and luminosity columns." - }, - { - "code": "def reload_config(self, call_params):\n path = '/' + self.api_version + '/ReloadConfig/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Reload Plivo Config helper" - }, - { - "code": "def clone(self):\n t = Tag(self.version.major, self.version.minor, self.version.patch)\n if self.revision is not None:\n t.revision = self.revision.clone()\n return t", - "docstring": "Returns a copy of this object" - }, - { - "code": "def notify(self, event_id):\n self._event_buffer.extend([event_id])\n self._event_count += 1\n if self._event_count >= self.threshold:\n logger.debug(\"Eventcount >= threshold\")\n self.make_callback(kind=\"event\")", - "docstring": "Let the FlowControl system know that there is an event." - }, - { - "code": "def _filter_by_zoom(element=None, conf_string=None, zoom=None):\n for op_str, op_func in [\n (\"=\", operator.eq),\n (\"<=\", operator.le),\n (\">=\", operator.ge),\n (\"<\", operator.lt),\n (\">\", operator.gt),\n ]:\n if conf_string.startswith(op_str):\n return element if op_func(zoom, _strip_zoom(conf_string, op_str)) else None", - "docstring": "Return element only if zoom condition matches with config string." 
- }, - { - "code": "def check_bidi(data):\n has_l = False\n has_ral = False\n for char in data:\n if stringprep.in_table_d1(char):\n has_ral = True\n elif stringprep.in_table_d2(char):\n has_l = True\n if has_l and has_ral:\n raise StringprepError(\"Both RandALCat and LCat characters present\")\n if has_ral and (not stringprep.in_table_d1(data[0])\n or not stringprep.in_table_d1(data[-1])):\n raise StringprepError(\"The first and the last character must\"\n \" be RandALCat\")\n return data", - "docstring": "Checks if sting is valid for bidirectional printing." - }, - { - "code": "def discover(cls, *args, **kwargs):\n file = os.path.join(Cache.cache_dir, Cache.cache_name)\n return cls.from_file(file, *args, **kwargs)", - "docstring": "Make a guess about the cache file location an try loading it." - }, - { - "code": "def passcode(callsign):\n assert isinstance(callsign, str)\n callsign = callsign.split('-')[0].upper()\n code = 0x73e2\n for i, char in enumerate(callsign):\n code ^= ord(char) << (8 if not i % 2 else 0)\n return code & 0x7fff", - "docstring": "Takes a CALLSIGN and returns passcode" - }, - { - "code": "def _get_mount_actions(self, mounts, mnt_datadisk):\n actions_to_add = []\n for mount in mounts:\n bucket = mount.value[len('gs://'):]\n mount_path = mount.docker_path\n actions_to_add.extend([\n google_v2_pipelines.build_action(\n name='mount-{}'.format(bucket),\n flags=['ENABLE_FUSE', 'RUN_IN_BACKGROUND'],\n image_uri=_GCSFUSE_IMAGE,\n mounts=[mnt_datadisk],\n commands=[\n '--implicit-dirs', '--foreground', '-o ro', bucket,\n os.path.join(providers_util.DATA_MOUNT_POINT, mount_path)\n ]),\n google_v2_pipelines.build_action(\n name='mount-wait-{}'.format(bucket),\n flags=['ENABLE_FUSE'],\n image_uri=_GCSFUSE_IMAGE,\n mounts=[mnt_datadisk],\n commands=[\n 'wait',\n os.path.join(providers_util.DATA_MOUNT_POINT, mount_path)\n ])\n ])\n return actions_to_add", - "docstring": "Returns a list of two actions per gcs bucket to mount." - }, - { - "code": "def rehome(old, new, struct):\n if old == new:\n return\n if isinstance(struct, list):\n for item in struct:\n rehome(old, new, item)\n elif isinstance(struct, dict):\n for key, val in struct.iteritems():\n if isinstance(val, (dict, list)):\n rehome(old, new, val)\n elif \"conf\" in key:\n continue\n elif \"orig\" in key:\n continue\n elif \"root\" in key or \"path\" in key:\n struct[key] = struct[key].replace(old, new)", - "docstring": "Replace all absolute paths to \"re-home\" it" - }, - { - "code": "def print_big_dir_and_big_file(self, top_n=5):\n self.assert_is_dir_and_exists()\n size_table1 = sorted(\n [(p, p.dirsize) for p in self.select_dir(recursive=False)],\n key=lambda x: x[1],\n reverse=True,\n )\n for p1, size1 in size_table1[:top_n]:\n print(\"{:<9} {:<9}\".format(repr_data_size(size1), p1.abspath))\n size_table2 = sorted(\n [(p, p.size) for p in p1.select_file(recursive=True)],\n key=lambda x: x[1],\n reverse=True,\n )\n for p2, size2 in size_table2[:top_n]:\n print(\" {:<9} {:<9}\".format(\n repr_data_size(size2), p2.abspath))", - "docstring": "Print ``top_n`` big dir and ``top_n`` big file in each dir." 
- }, - { - "code": "def find_next(self):\n if int(self.num_retries) < 0:\n self._cnt_retries += 1\n sleeptime = (self._cnt_retries - 1) * 2 if self._cnt_retries < 10 else 10\n if sleeptime:\n log.warning(\n \"Lost connection to node during rpcexec(): %s (%d/%d) \"\n % (self.url, self._cnt_retries, self.num_retries)\n + \"Retrying in %d seconds\" % sleeptime\n )\n sleep(sleeptime)\n return next(self.urls)\n urls = [\n k\n for k, v in self._url_counter.items()\n if (\n int(self.num_retries) >= 0\n and v <= self.num_retries\n and (k != self.url or len(self._url_counter) == 1)\n )\n ]\n if not len(urls):\n raise NumRetriesReached\n url = urls[0]\n return url", - "docstring": "Find the next url in the list" - }, - { - "code": "def _generalized_word_starts(self, xs):\n self.word_starts = []\n i = 0\n for n in range(len(xs)):\n self.word_starts.append(i)\n i += len(xs[n]) + 1", - "docstring": "Helper method returns the starting indexes of strings in GST" - }, - { - "code": "def isMine(self, scriptname):\n suffix = os.path.splitext(scriptname)[1].lower()\n if suffix.startswith('.'):\n suffix = suffix[1:]\n return self.suffix == suffix", - "docstring": "Primitive queuing system detection; only looks at suffix at the moment." - }, - { - "code": "def _to_gen_(iterable):\n from collections import Iterable\n for elm in iterable:\n if isinstance(elm, Iterable) and not isinstance(elm, (str, bytes)):\n yield from flatten(elm)\n else: yield elm", - "docstring": "Recursively iterate lists and tuples" - }, - { - "code": "def child_added(self, child):\n if child.widget:\n self.parent().init_info_window_adapter()\n super(AndroidMapMarker, self).child_added(child)", - "docstring": "If a child is added we have to make sure the map adapter exists" - }, - { - "code": "def value_from_datadict(self, *args, **kwargs):\n value = super(RichTextWidget, self).value_from_datadict(\n *args, **kwargs)\n if value is not None:\n value = self.get_sanitizer()(value)\n return value", - "docstring": "Pass the submitted value through the sanitizer before returning it." - }, - { - "code": "def _randbelow(self, n):\n k = _int_bit_length(n)\n r = self.getrandbits(k)\n while r >= n:\n r = self.getrandbits(k)\n return r", - "docstring": "Return a random int in the range [0,n)." - }, - { - "code": "def _publish_instruction_as_executed(self, insn):\n self._icount += 1\n self._publish('did_execute_instruction', self._last_pc, self.PC, insn)", - "docstring": "Notify listeners that an instruction has been executed." - }, - { - "code": "def forward(self, input):\n return th.nn.functional.linear(input, self.weight.div(self.weight.pow(2).sum(0).sqrt()))", - "docstring": "Feed-forward through the network." - }, - { - "code": "def _get_param_names(self):\n template = Template(self.yaml_string)\n names = ['yaml_string']\n for match in re.finditer(template.pattern, template.template):\n name = match.group('named') or match.group('braced')\n assert name is not None\n names.append(name)\n return names", - "docstring": "Get mappable parameters from YAML." 
- }, - { - "code": "def _add_document(self, doc_id, conn=None, nosave=False, score=1.0, payload=None,\n replace=False, partial=False, language=None, **fields):\n if conn is None:\n conn = self.redis\n if partial:\n replace = True\n args = [self.ADD_CMD, self.index_name, doc_id, score]\n if nosave:\n args.append('NOSAVE')\n if payload is not None:\n args.append('PAYLOAD')\n args.append(payload)\n if replace:\n args.append('REPLACE')\n if partial:\n args.append('PARTIAL')\n if language:\n args += ['LANGUAGE', language]\n args.append('FIELDS')\n args += list(itertools.chain(*fields.items()))\n return conn.execute_command(*args)", - "docstring": "Internal add_document used for both batch and single doc indexing" - }, - { - "code": "def leave(self):\n if self.joined:\n p=MucPresence(to_jid=self.room_jid,stanza_type=\"unavailable\")\n self.manager.stream.send(p)", - "docstring": "Send a leave request for the room." - }, - { - "code": "def get(self, ring, angle):\n pixel = self.angleToPixel(angle, ring)\n return self._get_base(pixel)", - "docstring": "Get RGB color tuple of color at index pixel" - }, - { - "code": "def formset_valid(self, formset):\n self.object_list = formset.save()\n return super(ModelFormSetMixin, self).formset_valid(formset)", - "docstring": "If the formset is valid, save the associated models." - }, - { - "code": "def edge(s, path, edge, alpha=1.0):\n path.moveto(edge.node1.x, edge.node1.y)\n if edge.node2.style == BACK:\n path.curveto(\n edge.node1.x,\n edge.node2.y,\n edge.node2.x,\n edge.node2.y,\n edge.node2.x,\n edge.node2.y,\n ) \n else:\n path.lineto(\n edge.node2.x, \n edge.node2.y\n )", - "docstring": "Visualization of a single edge between two nodes." - }, - { - "code": "def removeLayout(self, layout):\n for cnt in reversed(range(layout.count())):\n item = layout.takeAt(cnt)\n widget = item.widget()\n if widget is not None:\n widget.deleteLater()\n else:\n self.removeLayout(item.layout())", - "docstring": "Iteratively remove graphical objects from layout." - }, - { - "code": "def next(self, length):\n return Segment(self.strip, length, self.offset + self.length)", - "docstring": "Return a new segment starting right after self in the same buffer." - }, - { - "code": "def record_strip_empty_volatile_subfields(rec):\n for tag in rec.keys():\n for field in rec[tag]:\n field[0][:] = [subfield for subfield in field[0]\n if subfield[1][:9] != \"VOLATILE:\"]", - "docstring": "Remove unchanged volatile subfields from the record." - }, - { - "code": "def string_repr(s):\n if compat.is_bytes(s):\n res = \"{!r}: \".format(s)\n for b in s:\n if type(b) is str:\n b = ord(b)\n res += \"%02x \" % b\n return res\n return \"{}\".format(s)", - "docstring": "Return a string as hex dump." - }, - { - "code": "def get(cls, group, admin):\n try:\n ga = cls.query.filter_by(\n group=group, admin_id=admin.get_id(),\n admin_type=resolve_admin_type(admin)).one()\n return ga\n except Exception:\n return None", - "docstring": "Get specific GroupAdmin object." - }, - { - "code": "def disconnect(self):\n with self.lock:\n if self.stream:\n if self.settings[u\"initial_presence\"]:\n self.send(Presence(stanza_type = \"unavailable\"))\n self.stream.disconnect()", - "docstring": "Gracefully disconnect from the server." 
- }, - { - "code": "def handle_transmission_error(self, learner_data, request_exception):\n try:\n sys_msg = request_exception.response.content\n except AttributeError:\n sys_msg = 'Not available'\n LOGGER.error(\n (\n 'Failed to send completion status call for enterprise enrollment %s'\n 'with payload %s'\n '\\nError message: %s'\n '\\nSystem message: %s'\n ),\n learner_data.enterprise_course_enrollment_id,\n learner_data,\n str(request_exception),\n sys_msg\n )", - "docstring": "Handle the case where the transmission fails." - }, - { - "code": "def _get_user_class(self, name):\r\n self._user_classes.setdefault(name, _make_user_class(self, name))\r\n return self._user_classes[name]", - "docstring": "Get or create a user class of the given type." - }, - { - "code": "def walk_up(start_dir, depth=20):\n root = start_dir\n for i in xrange(depth):\n contents = os.listdir(root)\n subdirs, files = [], []\n for f in contents:\n if os.path.isdir(os.path.join(root, f)):\n subdirs.append(f)\n else:\n files.append(f)\n yield root, subdirs, files\n parent = os.path.dirname(root)\n if parent and not parent == root:\n root = parent\n else:\n break", - "docstring": "Walk up a directory tree" - }, - { - "code": "def _setPath(cls):\n cls._path = os.path.join(os.environ['NTA_DYNAMIC_CONF_DIR'],\n cls.customFileName)", - "docstring": "Sets the path of the custom configuration file" - }, - { - "code": "async def modify_otr_status(self, modify_otr_status_request):\n response = hangouts_pb2.ModifyOTRStatusResponse()\n await self._pb_request('conversations/modifyotrstatus',\n modify_otr_status_request, response)\n return response", - "docstring": "Enable or disable message history in a conversation." - }, - { - "code": "def wrap_deepmind_retro(env, scale=True, frame_stack=4):\n env = WarpFrame(env)\n env = ClipRewardEnv(env)\n if frame_stack > 1:\n env = FrameStack(env, frame_stack)\n if scale:\n env = ScaledFloatFrame(env)\n return env", - "docstring": "Configure environment for retro games, using config similar to DeepMind-style Atari in wrap_deepmind" - }, - { - "code": "def lunch(self, message=\"Time for lunch\", shout: bool = False):\n return self.helper.output(message, shout)", - "docstring": "Say something in the afternoon" - }, - { - "code": "def k_in_row(self, board, move, player, (delta_x, delta_y)):\n \"Return true if there is a line through move on board for player.\"\n x, y = move\n n = 0\n while board.get((x, y)) == player:\n n += 1\n x, y = x + delta_x, y + delta_y\n x, y = move\n while board.get((x, y)) == player:\n n += 1\n x, y = x - delta_x, y - delta_y\n n -= 1\n return n >= self.k", - "docstring": "Return true if there is a line through move on board for player." 
- }, - { - "code": "def _inactivate_organization(organization):\n [_inactivate_organization_course_relationship(record) for record\n in internal.OrganizationCourse.objects.filter(organization_id=organization.id, active=True)]\n [_inactivate_record(record) for record\n in internal.Organization.objects.filter(id=organization.id, active=True)]", - "docstring": "Inactivates an activated organization as well as any active relationships" - }, - { - "code": "def _hook_syscall(self, uc, data):\n logger.debug(f\"Stopping emulation at {hex(uc.reg_read(self._to_unicorn_id('RIP')))} to perform syscall\")\n self.sync_unicorn_to_manticore()\n from ..native.cpu.abstractcpu import Syscall\n self._to_raise = Syscall()\n uc.emu_stop()", - "docstring": "Unicorn hook that transfers control to Manticore so it can execute the syscall" - }, - { - "code": "def available(self, timeout=5):\n host = self._connect_args['host']\n port = self._connect_args['port']\n try:\n sock = socket.create_connection((host, port), timeout=timeout)\n sock.close()\n return True\n except socket.error:\n pass\n return False", - "docstring": "Returns True if database server is running, False otherwise." - }, - { - "code": "def extend(self, item):\n if self.meta_type == 'dict':\n raise AssertionError('Cannot extend to object of `dict` base type!')\n if self.meta_type == 'list':\n self._list.extend(item)\n return", - "docstring": "Extend list from object, if object is list." - }, - { - "code": "def tracemessage(self, maxlen=6):\n result = \"\"\n for i, value in enumerate(self):\n result += \"{0}: {1}\\n\".format(i, get_node_repr(value))\n result = result.strip(\"\\n\")\n lines = result.split(\"\\n\")\n if maxlen and len(lines) > maxlen:\n i = int(maxlen / 2)\n lines = lines[:i] + [\"...\"] + lines[-(maxlen - i) :]\n result = \"\\n\".join(lines)\n return result", - "docstring": "if maxlen > 0, the message is shortened to maxlen traces." - }, - { - "code": "def place_project_bid(session, project_id, bidder_id, description, amount,\n period, milestone_percentage):\n bid_data = {\n 'project_id': project_id,\n 'bidder_id': bidder_id,\n 'description': description,\n 'amount': amount,\n 'period': period,\n 'milestone_percentage': milestone_percentage,\n }\n response = make_post_request(session, 'bids', json_data=bid_data)\n json_data = response.json()\n if response.status_code == 200:\n bid_data = json_data['result']\n return Bid(bid_data)\n else:\n raise BidNotPlacedException(message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Place a bid on a project" - }, - { - "code": "def expected_utility(a, s, U, mdp):\n \"The expected utility of doing a in state s, according to the MDP and U.\"\n return sum([p * U[s1] for (p, s1) in mdp.T(s, a)])", - "docstring": "The expected utility of doing a in state s, according to the MDP and U." 
- }, - { - "code": "def from_separate(cls, meta: ProgramDescription, vertex_source, geometry_source=None, fragment_source=None,\r\n tess_control_source=None, tess_evaluation_source=None):\r\n instance = cls(meta)\r\n instance.vertex_source = ShaderSource(\r\n VERTEX_SHADER,\r\n meta.path or meta.vertex_shader,\r\n vertex_source,\r\n )\r\n if geometry_source:\r\n instance.geometry_source = ShaderSource(\r\n GEOMETRY_SHADER,\r\n meta.path or meta.geometry_shader,\r\n geometry_source,\r\n )\r\n if fragment_source:\r\n instance.fragment_source = ShaderSource(\r\n FRAGMENT_SHADER,\r\n meta.path or meta.fragment_shader,\r\n fragment_source,\r\n )\r\n if tess_control_source:\r\n instance.tess_control_source = ShaderSource(\r\n TESS_CONTROL_SHADER,\r\n meta.path or meta.tess_control_shader,\r\n tess_control_source,\r\n )\r\n if tess_evaluation_source:\r\n instance.tess_evaluation_source = ShaderSource(\r\n TESS_EVALUATION_SHADER,\r\n meta.path or meta.tess_control_shader,\r\n tess_evaluation_source,\r\n )\r\n return instance", - "docstring": "Initialize multiple shader strings" - }, - { - "code": "def append(self, object):\n the_id = object.id\n self._check(the_id)\n self._dict[the_id] = len(self)\n list.append(self, object)", - "docstring": "append object to end" - }, - { - "code": "def _zipped(self, docs_base):\n with pushd(docs_base):\n with tempfile.NamedTemporaryFile(prefix='pythonhosted-', delete=False) as ziphandle:\n pass\n zip_name = shutil.make_archive(ziphandle.name, 'zip')\n notify.info(\"Uploading {:.1f} MiB from '{}' to '{}'...\"\n .format(os.path.getsize(zip_name) / 1024.0, zip_name, self.target))\n with io.open(zip_name, 'rb') as zipread:\n try:\n yield zipread\n finally:\n os.remove(ziphandle.name)\n os.remove(ziphandle.name + '.zip')", - "docstring": "Provide a zipped stream of the docs tree." - }, - { - "code": "def show_message(self, message_str):\n if self._message_handle is not None:\n self._message_handle.cancel()\n self._message_handle = asyncio.get_event_loop().call_later(\n self._MESSAGE_DELAY_SECS, self._clear_message\n )\n self._message = message_str\n self._update()", - "docstring": "Show a temporary message." - }, - { - "code": "def write_targets(self):\n if len(self.ldap_strings) == 0 and len(self.ips) == 0:\n print_notification(\"No targets left\")\n if self.auto_exit:\n if self.notifier:\n self.notifier.stop()\n self.terminate_processes()\n with open(self.targets_file, 'w') as f:\n f.write('\\n'.join(self.ldap_strings + self.ips))", - "docstring": "write_targets will write the contents of ips and ldap_strings to the targets_file." - }, - { - "code": "def reset_colors(self):\n for k, e in enumerate(self.g.edges()):\n self.g.set_ep(e, 'edge_color', self.edge2queue[k].colors['edge_color'])\n for v in self.g.nodes():\n self.g.set_vp(v, 'vertex_fill_color', self.colors['vertex_fill_color'])", - "docstring": "Resets all edge and vertex colors to their default values." - }, - { - "code": "def when_called_with(self, *some_args, **some_kwargs):\n if not self.expected:\n raise TypeError('expected exception not set, raises() must be called first')\n try:\n self.val(*some_args, **some_kwargs)\n except BaseException as e:\n if issubclass(type(e), self.expected):\n return AssertionBuilder(str(e), self.description, self.kind)\n else:\n self._err('Expected <%s> to raise <%s> when called with (%s), but raised <%s>.' 
% (\n self.val.__name__,\n self.expected.__name__,\n self._fmt_args_kwargs(*some_args, **some_kwargs),\n type(e).__name__))\n self._err('Expected <%s> to raise <%s> when called with (%s).' % (\n self.val.__name__,\n self.expected.__name__,\n self._fmt_args_kwargs(*some_args, **some_kwargs)))", - "docstring": "Asserts the val callable when invoked with the given args and kwargs raises the expected exception." - }, - { - "code": "def convert(self, value, param, ctx):\n if not isinstance(value, str):\n return value\n if isinstance(value, six.binary_type):\n value = value.decode('UTF-8')\n if value.startswith('@'):\n filename = os.path.expanduser(value[1:])\n file_obj = super(Variables, self).convert(filename, param, ctx)\n if hasattr(file_obj, 'read'):\n return file_obj.read()\n return file_obj\n return value", - "docstring": "Return file content if file, else, return value as-is" - }, - { - "code": "def print_sizes(self):\n float_size = 4\n MB = 1024 * 1024\n size_ = self.n_samples * float_size\n em_size = size_ * self.num_particles / MB\n pos_size = 3 * size_ * self.num_particles / MB\n print(\" Number of particles:\", self.num_particles)\n print(\" Number of time steps:\", self.n_samples)\n print(\" Emission array - 1 particle (float32): %.1f MB\" % (size_ / MB))\n print(\" Emission array (float32): %.1f MB\" % em_size)\n print(\" Position array (float32): %.1f MB \" % pos_size)", - "docstring": "Print on-disk array sizes required for current set of parameters." - }, - { - "code": "def _is_simple_numeric(data):\r\n for item in data:\r\n if isinstance(item, set):\r\n item = list(item)\r\n if isinstance(item, list):\r\n if not _is_simple_numeric(item):\r\n return False\r\n elif not isinstance(item, (int, float, complex)):\r\n return False\r\n return True", - "docstring": "Test if a list contains simple numeric data." - }, - { - "code": "def _clear_celery_task_data(self, my_task):\n if 'task_id' in my_task.internal_data:\n history = my_task._get_internal_data('task_history', [])\n history.append(my_task._get_internal_data('task_id'))\n del my_task.internal_data['task_id']\n my_task._set_internal_data(task_history=history)\n if 'task_state' in my_task.internal_data:\n del my_task.internal_data['task_state']\n if 'error' in my_task.internal_data:\n del my_task.internal_data['error']\n if hasattr(my_task, 'async_call'):\n delattr(my_task, 'async_call')\n if hasattr(my_task, 'deserialized'):\n delattr(my_task, 'deserialized')", - "docstring": "Clear celery task data" - }, - { - "code": "def _timeout_cb(self, method):\n self._anything_done = True\n logger.debug(\"_timeout_cb() called for: {0!r}\".format(method))\n result = method()\n rec = method._pyxmpp_recurring\n if rec:\n self._prepare_pending()\n return True\n if rec is None and result is not None:\n logger.debug(\" auto-recurring, restarting in {0} s\"\n .format(result))\n tag = glib.timeout_add(int(result * 1000), self._timeout_cb, method)\n self._timer_sources[method] = tag\n else:\n self._timer_sources.pop(method, None)\n self._prepare_pending()\n return False", - "docstring": "Call the timeout handler due." 
- }, - { - "code": "def load_exif(album):\n if not hasattr(album.gallery, \"exifCache\"):\n _restore_cache(album.gallery)\n cache = album.gallery.exifCache\n for media in album.medias:\n if media.type == \"image\":\n key = os.path.join(media.path, media.filename)\n if key in cache:\n media.exif = cache[key]", - "docstring": "Loads the exif data of all images in an album from cache" - }, - { - "code": "def flatten_grads(var_list, grads):\n return tf.concat([tf.reshape(grad, [U.numel(v)])\n for (v, grad) in zip(var_list, grads)], 0)", - "docstring": "Flattens a variables and their gradients." - }, - { - "code": "def fetch_by_url(self, url):\n service = self.collection.find_one({'url': url})\n if not service:\n raise ServiceNotFound\n return Service(service)", - "docstring": "Gets service for given ``url`` from mongodb storage." - }, - { - "code": "async def lookup_entities(client, args):\n lookup_spec = _get_lookup_spec(args.entity_identifier)\n request = hangups.hangouts_pb2.GetEntityByIdRequest(\n request_header=client.get_request_header(),\n batch_lookup_spec=[lookup_spec],\n )\n res = await client.get_entity_by_id(request)\n for entity_result in res.entity_result:\n for entity in entity_result.entity:\n print(entity)", - "docstring": "Search for entities by phone number, email, or gaia_id." - }, - { - "code": "def read_value(self):\n pass\n self._value_read.clear()\n self._device._peripheral.readValueForDescriptor(self._descriptor)\n if not self._value_read.wait(timeout_sec):\n raise RuntimeError('Exceeded timeout waiting to read characteristic value!')\n return self._value", - "docstring": "Read the value of this descriptor." - }, - { - "code": "def CherryPyWSGIServer(bind_addr,\n wsgi_app,\n numthreads = 10,\n server_name = None,\n max = -1,\n request_queue_size = 5,\n timeout = 10,\n shutdown_timeout = 5):\n max_threads = max\n if max_threads < 0:\n max_threads = 0\n return Rocket(bind_addr, 'wsgi', {'wsgi_app': wsgi_app},\n min_threads = numthreads,\n max_threads = max_threads,\n queue_size = request_queue_size,\n timeout = timeout)", - "docstring": "A Cherrypy wsgiserver-compatible wrapper." - }, - { - "code": "def _image_name_from_url(url):\n find = r'https?://|[^\\w]'\n replace = '_'\n return re.sub(find, replace, url).strip('_')", - "docstring": "Create a nice image name from the url." 
- }, - { - "code": "def object(self, infotype, key):\n \"Return the encoding, idletime, or refcount about the key\"\n redisent = self.redises[self._getnodenamefor(key) + '_slave']\n return getattr(redisent, 'object')(infotype, key)", - "docstring": "Return the encoding, idletime, or refcount about the key" - }, - { - "code": "def do_toggle_fullscreen(self, action):\n is_fullscreen = action.get_active()\n if is_fullscreen:\n self.fullscreen()\n else:\n self.unfullscreen()", - "docstring": "Widget Action to Toggle fullscreen from the GUI" - }, - { - "code": "def _parse_genotype(self, vcf_fields):\n format_col = vcf_fields[8].split(':')\n genome_data = vcf_fields[9].split(':')\n try:\n gt_idx = format_col.index('GT')\n except ValueError:\n return []\n return [int(x) for x in re.split(r'[\\|/]', genome_data[gt_idx]) if\n x != '.']", - "docstring": "Parse genotype from VCF line data" - }, - { - "code": "def encode(plaintext, code):\n \"Encodes text, using a code which is a permutation of the alphabet.\"\n from string import maketrans\n trans = maketrans(alphabet + alphabet.upper(), code + code.upper())\n return plaintext.translate(trans)", - "docstring": "Encodes text, using a code which is a permutation of the alphabet." - }, - { - "code": "def _sorted_copy(self, comparison, reversed=False):\n sorted = self.copy()\n _list.sort(sorted, comparison)\n if reversed:\n _list.reverse(sorted)\n return sorted", - "docstring": "Returns a sorted copy with the colors arranged according to the given comparison." - }, - { - "code": "def search_images(q, start=0, size=\"\", wait=10, asynchronous=False, cached=False):\n service = GOOGLE_IMAGES\n return GoogleSearch(q, start, service, size, wait, asynchronous, cached)", - "docstring": "Returns a Google images query formatted as a GoogleSearch list object." - }, - { - "code": "def file_empty(fp):\n if six.PY2:\n contents = fp.read()\n fp.seek(0)\n return not bool(contents)\n else:\n return not fp.peek()", - "docstring": "Determine if a file is empty or not." - }, - { - "code": "def wait_for_page_to_load(self):\n self.wait.until(lambda _: self.loaded)\n self.pm.hook.pypom_after_wait_for_page_to_load(page=self)\n return self", - "docstring": "Wait for the page to load." - }, - { - "code": "def format_mtime(mtime):\n now = datetime.now()\n dt = datetime.fromtimestamp(mtime)\n return '%s %2d %5s' % (\n dt.strftime('%b'), dt.day,\n dt.year if dt.year != now.year else dt.strftime('%H:%M'))", - "docstring": "Format the date associated with a file to be displayed in directory listing." - }, - { - "code": "def union(self, i):\n if self.intersects(i) or self.end + 1 == i.start or i.end + 1 == self.start:\n return Interval(min(self.start, i.start), max(self.end, i.end))\n else:\n return None", - "docstring": "If intervals intersect, returns their union, otherwise returns None" - }, - { - "code": "def similar_to(partial_zipcode, zips=_zips):\n return [z for z in zips if z[\"zip_code\"].startswith(partial_zipcode)]", - "docstring": "List of zipcode dicts where zipcode prefix matches `partial_zipcode`" - }, - { - "code": "def _smixins(self, name):\n return (self._mixins[name] if name in self._mixins else False)", - "docstring": "Inner wrapper to search for mixins by name." - }, - { - "code": "def money(min=0, max=10):\n value = random.choice(range(min * 100, max * 100))\n return \"%1.2f\" % (float(value) / 100)", - "docstring": "Return a str of decimal with two digits after a decimal mark." 
- }, - { - "code": "def colors(self, n=10, d=0.035):\n s = sum([w for clr, rng, w in self.ranges])\n colors = colorlist()\n for i in _range(n):\n r = random()\n for clr, rng, weight in self.ranges:\n if weight / s >= r: break\n r -= weight / s\n colors.append(rng(clr, d))\n return colors", - "docstring": "Returns a number of random colors from the theme." - }, - { - "code": "def write_data(data, filename):\n name, ext = get_file_extension(filename)\n func = json_write_data if ext == '.json' else yaml_write_data\n return func(data, filename)", - "docstring": "Call right func to save data according to file extension" - }, - { - "code": "def request(self, batch, attempt=0):\n try:\n q = self.api.new_queue()\n for msg in batch:\n q.add(msg['event'], msg['value'], source=msg['source'])\n q.submit()\n except:\n if attempt > self.retries:\n raise\n self.request(batch, attempt+1)", - "docstring": "Attempt to upload the batch and retry before raising an error" - }, - { - "code": "def convert_charref(self, name):\n try:\n n = int(name)\n except ValueError:\n return\n if not 0 <= n <= 127 :\n return\n return self.convert_codepoint(n)", - "docstring": "This method fixes a bug in Python's SGMLParser." - }, - { - "code": "def _transmit_create(self, channel_metadata_item_map):\n for chunk in chunks(channel_metadata_item_map, self.enterprise_configuration.transmission_chunk_size):\n serialized_chunk = self._serialize_items(list(chunk.values()))\n try:\n self.client.create_content_metadata(serialized_chunk)\n except ClientError as exc:\n LOGGER.error(\n 'Failed to update [%s] content metadata items for integrated channel [%s] [%s]',\n len(chunk),\n self.enterprise_configuration.enterprise_customer.name,\n self.enterprise_configuration.channel_code,\n )\n LOGGER.error(exc)\n else:\n self._create_transmissions(chunk)", - "docstring": "Transmit content metadata creation to integrated channel." 
- }, - { - "code": "def update_security_group_rule(context, id, security_group_rule):\n LOG.info(\"update_security_group_rule for tenant %s\" %\n (context.tenant_id))\n new_rule = security_group_rule[\"security_group_rule\"]\n new_rule = _filter_update_security_group_rule(new_rule)\n with context.session.begin():\n rule = db_api.security_group_rule_find(context, id=id,\n scope=db_api.ONE)\n if not rule:\n raise sg_ext.SecurityGroupRuleNotFound(id=id)\n db_rule = db_api.security_group_rule_update(context, rule, **new_rule)\n group_id = db_rule.group_id\n group = db_api.security_group_find(context, id=group_id,\n scope=db_api.ONE)\n if not group:\n raise sg_ext.SecurityGroupNotFound(id=group_id)\n if group:\n _perform_async_update_rule(context, group_id, group, rule.id,\n RULE_UPDATE)\n return v._make_security_group_rule_dict(db_rule)", - "docstring": "Updates a rule and updates the ports" - }, - { - "code": "def _setTypes(self, encoderSpec):\n if self.encoderType is None:\n if self.dataType in ['int','float']:\n self.encoderType='adaptiveScalar'\n elif self.dataType=='string':\n self.encoderType='category'\n elif self.dataType in ['date', 'datetime']:\n self.encoderType='date'\n if self.dataType is None:\n if self.encoderType in ['scalar','adaptiveScalar']:\n self.dataType='float'\n elif self.encoderType in ['category', 'enumeration']:\n self.dataType='string'\n elif self.encoderType in ['date', 'datetime']:\n self.dataType='datetime'", - "docstring": "Set up the dataTypes and initialize encoders" - }, - { - "code": "def _usage(prog_name=os.path.basename(sys.argv[0])):\n spacer = ' ' * len('usage: ')\n usage = prog_name + ' -b LIST [-S SEPARATOR] [file ...]\\n' \\\n + spacer + prog_name + ' -c LIST [-S SEPERATOR] [file ...]\\n' \\\n + spacer + prog_name \\\n + ' -f LIST [-d DELIM] [-e] [-S SEPERATOR] [-s] [file ...]'\n return \"usage: \" + usage.rstrip()", - "docstring": "Returns usage string with no trailing whitespace." - }, - { - "code": "def _find_lcs(self, node, stringIdxs):\n nodes = [self._find_lcs(n, stringIdxs)\n for (n,_) in node.transition_links\n if n.generalized_idxs.issuperset(stringIdxs)]\n if nodes == []:\n return node\n deepestNode = max(nodes, key=lambda n: n.depth)\n return deepestNode", - "docstring": "Helper method that finds LCS by traversing the labeled GSD." - }, - { - "code": "def from_server(cls, server, slug, identifier):\n task = server.get(\n 'task',\n replacements={\n 'slug': slug,\n 'identifier': identifier})\n return cls(**task)", - "docstring": "Retrieve a task from the server" - }, - { - "code": "def _stream_data(self, environ, content_length, block_size):\n if content_length == 0:\n _logger.info(\"PUT: Content-Length == 0. Creating empty file...\")\n else:\n assert content_length > 0\n contentremain = content_length\n while contentremain > 0:\n n = min(contentremain, block_size)\n readbuffer = environ[\"wsgi.input\"].read(n)\n if not len(readbuffer) > 0:\n _logger.error(\"input.read({}) returned 0 bytes\".format(n))\n break\n environ[\"wsgidav.some_input_read\"] = 1\n yield readbuffer\n contentremain -= len(readbuffer)\n if contentremain == 0:\n environ[\"wsgidav.all_input_read\"] = 1", - "docstring": "Get the data from a non-chunked transfer." - }, - { - "code": "def process_file(self, file):\n if sys.version_info[0] >= 3:\n nxt = file.__next__\n else:\n nxt = file.next\n for token in tokenize.generate_tokens(nxt):\n self.process_token(*token)\n self.make_index()", - "docstring": "Process a file object." 
- }, - { - "code": "def load_source(self):\n if self.filename in self.STDIN_NAMES:\n self.filename = \"stdin\"\n if sys.version_info[0] < 3:\n self.source = sys.stdin.read()\n else:\n self.source = TextIOWrapper(sys.stdin.buffer, errors=\"ignore\").read()\n else:\n handle = tokenize_open(self.filename)\n self.source = handle.read()\n handle.close()", - "docstring": "Load the source for the specified file." - }, - { - "code": "def whoami(self, msg, args):\n output = [\"Hello %s\" % msg.user]\n if hasattr(self._bot.dispatcher, 'auth_manager') and msg.user.is_admin is True:\n output.append(\"You are a *bot admin*.\")\n output.append(\"Bot version: %s-%s\" % (self._bot.version, self._bot.commit))\n return '\\n'.join(output)", - "docstring": "Prints information about the user and bot version." - }, - { - "code": "def psffunc(self, x, y, z, **kwargs):\n if self.polychromatic:\n func = psfcalc.calculate_polychrome_pinhole_psf\n else:\n func = psfcalc.calculate_pinhole_psf\n x0, y0 = [psfcalc.vec_to_halfvec(v) for v in [x,y]]\n vls = psfcalc.wrap_and_calc_psf(x0, y0, z, func, **kwargs)\n return vls / vls.sum()", - "docstring": "Calculates a pinhole psf" - }, - { - "code": "def p_file_comments_on_lics(self, f_term, predicate):\n try:\n for _, _, comment in self.graph.triples((f_term, predicate, None)):\n self.builder.set_file_license_comment(self.doc, six.text_type(comment))\n except CardinalityError:\n self.more_than_one_error('file comments on license')", - "docstring": "Sets file license comment." - }, - { - "code": "def _from_parts(cls, args, init=True):\n if args:\n args = list(args)\n if isinstance(args[0], WindowsPath2):\n args[0] = args[0].path\n elif args[0].startswith(\"\\\\\\\\?\\\\\"):\n args[0] = args[0][4:]\n args = tuple(args)\n return super(WindowsPath2, cls)._from_parts(args, init)", - "docstring": "Strip \\\\?\\ prefix in init phase" - }, - { - "code": "def _getTarball(url, into_directory, cache_key, origin_info=None):\n try:\n access_common.unpackFromCache(cache_key, into_directory)\n except KeyError as e:\n tok = settings.getProperty('github', 'authtoken')\n headers = {}\n if tok is not None:\n headers['Authorization'] = 'token ' + str(tok)\n logger.debug('GET %s', url)\n response = requests.get(url, allow_redirects=True, stream=True, headers=headers)\n response.raise_for_status()\n logger.debug('getting file: %s', url)\n logger.debug('headers: %s', response.headers)\n response.raise_for_status()\n access_common.unpackTarballStream(\n stream = response,\n into_directory = into_directory,\n hash = {},\n cache_key = cache_key,\n origin_info = origin_info\n )", - "docstring": "unpack the specified tarball url into the specified directory" - }, - { - "code": "def image(self, path, x, y, width=None, height=None, alpha=1.0, data=None, draw=True, **kwargs):\n return self.Image(path, x, y, width, height, alpha, data, **kwargs)", - "docstring": "Draws a image form path, in x,y and resize it to width, height dimensions." 
- }, - { - "code": "def anumb_to_atom(self, anumb):\n assert isinstance(anumb, int), \"anumb must be integer\"\n if not self._anumb_to_atom:\n if self.atoms:\n for atom in self.atoms:\n self._anumb_to_atom[atom.number] = atom\n return self._anumb_to_atom[anumb]\n else:\n self.logger(\"no atoms in the molecule\")\n return False\n else:\n if anumb in self._anumb_to_atom:\n return self._anumb_to_atom[anumb]\n else:\n self.logger(\"no such atom number ({0:d}) in the molecule\".format(anumb))\n return False", - "docstring": "Returns the atom object corresponding to an atom number" - }, - { - "code": "def put(self, state_id):\n self._states.append(state_id)\n self._lock.notify_all()\n return state_id", - "docstring": "Enqueue it for processing" - }, - { - "code": "def clone(self):\n return StreamThrottle(\n read=self.read.clone(),\n write=self.write.clone()\n )", - "docstring": "Clone throttles without memory" - }, - { - "code": "def _get_version(self):\n version = self._get_param(param=\"version\", allowed_values=allowed_versions[self.params['service']],\n optional=True)\n if version is None and self._get_request_type() != \"getcapabilities\":\n raise OWSMissingParameterValue('Parameter \"version\" is missing', value=\"version\")\n else:\n return version", - "docstring": "Find requested version in GET request." - }, - { - "code": "def run(self):\n logger.info(u'Started listening')\n while not self._stop:\n xml = self._readxml()\n if xml is None:\n break\n if not self.modelize:\n logger.info(u'Raw xml: %s' % xml)\n self.results.put(xml)\n continue\n if xml.tag == 'RECOGOUT':\n sentence = Sentence.from_shypo(xml.find('SHYPO'), self.encoding)\n logger.info(u'Modelized recognition: %r' % sentence)\n self.results.put(sentence)\n else:\n logger.info(u'Unmodelized xml: %s' % xml)\n self.results.put(xml)\n logger.info(u'Stopped listening')", - "docstring": "Start listening to the server" - }, - { - "code": "def async_process(fn):\n def run(*args, **kwargs):\n proc = mp.Process(target=fn, args=args, kwargs=kwargs)\n proc.start()\n return proc\n return run", - "docstring": "Decorator function to launch a function as a separate process" - }, - { - "code": "def update_title_to_proceeding(self):\n titles = record_get_field_instances(self.record,\n tag=\"245\")\n for title in titles:\n subs = field_get_subfields(title)\n new_subs = []\n if \"a\" in subs:\n new_subs.append((\"a\", subs['a'][0]))\n if \"b\" in subs:\n new_subs.append((\"c\", subs['b'][0]))\n record_add_field(self.record,\n tag=\"111\",\n subfields=new_subs)\n record_delete_fields(self.record, tag=\"245\")\n record_delete_fields(self.record, tag=\"246\")", - "docstring": "Move title info from 245 to 111 proceeding style." - }, - { - "code": "def div_img(img1, div2):\n if is_img(div2):\n return img1.get_data()/div2.get_data()\n elif isinstance(div2, (float, int)):\n return img1.get_data()/div2\n else:\n raise NotImplementedError('Cannot divide {}({}) by '\n '{}({})'.format(type(img1),\n img1,\n type(div2),\n div2))", - "docstring": "Pixelwise division or divide by a number" - }, - { - "code": "def prepare_output(self, row):\n date_fields = ['last-update', 'create-time', 'start-time', 'end-time']\n int_fields = ['task-attempt']\n for col in date_fields:\n if col in row:\n row[col] = self.default_format_date(row[col])\n for col in int_fields:\n if col in row and row[col] is not None:\n row[col] = int(row[col])\n return row", - "docstring": "Convert types of task fields." 
- }, - { - "code": "def _validate_sample_rates(input_filepath_list, combine_type):\n sample_rates = [\n file_info.sample_rate(f) for f in input_filepath_list\n ]\n if not core.all_equal(sample_rates):\n raise IOError(\n \"Input files do not have the same sample rate. The {} combine \"\n \"type requires that all files have the same sample rate\"\n .format(combine_type)\n )", - "docstring": "Check if files in input file list have the same sample rate" - }, - { - "code": "def _restart(self, my_task):\n if not my_task._has_state(Task.WAITING):\n raise WorkflowException(my_task, \"Cannot refire a task that is not\"\n \"in WAITING state\")\n if my_task._get_internal_data('task_id') is not None:\n if not hasattr(my_task, 'async_call'):\n task_id = my_task._get_internal_data('task_id')\n my_task.async_call = default_app.AsyncResult(task_id)\n my_task.deserialized = True\n my_task.async_call.state\n async_call = my_task.async_call\n if async_call.state == 'FAILED':\n pass\n elif async_call.state in ['RETRY', 'PENDING', 'STARTED']:\n async_call.revoke()\n LOG.info(\"Celery task '%s' was in %s state and was revoked\" % (\n async_call.state, async_call))\n elif async_call.state == 'SUCCESS':\n LOG.warning(\"Celery task '%s' succeeded, but a refire was \"\n \"requested\" % async_call)\n self._clear_celery_task_data(my_task)\n return self._start(my_task)", - "docstring": "Abort celery task and retry it" - }, - { - "code": "def users():\n from invenio_groups.models import Group, Membership, \\\n PrivacyPolicy, SubscriptionPolicy\n admin = accounts.datastore.create_user(\n email='admin@inveniosoftware.org',\n password=encrypt_password('123456'),\n active=True,\n )\n reader = accounts.datastore.create_user(\n email='reader@inveniosoftware.org',\n password=encrypt_password('123456'),\n active=True,\n )\n admins = Group.create(name='admins', admins=[admin])\n for i in range(10):\n Group.create(name='group-{0}'.format(i), admins=[admin])\n Membership.create(admins, reader)\n db.session.commit()", - "docstring": "Load default users and groups." - }, - { - "code": "def crop(image, top_offset=0, left_offset=0, bottom_offset=0, right_offset=0):\n if bottom_offset or top_offset or left_offset or right_offset:\n width, height = image.size\n box = (left_offset, top_offset,\n width - right_offset, height - bottom_offset)\n image = image.crop(box=box)\n return image", - "docstring": "Return an image cropped on top, bottom, left or right." - }, - { - "code": "def restart(self):\r\n if self._engine:\r\n self._engine.repl.terminate()\r\n executable = self._executable\r\n if executable:\r\n os.environ['OCTAVE_EXECUTABLE'] = executable\r\n if 'OCTAVE_EXECUTABLE' not in os.environ and 'OCTAVE' in os.environ:\r\n os.environ['OCTAVE_EXECUTABLE'] = os.environ['OCTAVE']\r\n self._engine = OctaveEngine(stdin_handler=self._handle_stdin,\r\n logger=self.logger)\r\n self._engine.eval('addpath(\"%s\");' % HERE.replace(osp.sep, '/'))", - "docstring": "Restart an Octave session in a clean state" - }, - { - "code": "def first(self, symbols):\n ret = set()\n if EPSILON in symbols:\n return set([EPSILON])\n for symbol in symbols:\n ret |= self._first[symbol] - set([EPSILON])\n if EPSILON not in self._first[symbol]:\n break\n else:\n ret.add(EPSILON)\n return ret", - "docstring": "Computes the intermediate FIRST set using symbols." 
- }, - { - "code": "def reduce_chunk(func, array):\n res = []\n for slice in iter_chunk_slice(array.shape[-1], array.chunkshape[-1]):\n res.append(func(array[..., slice]))\n return func(res)", - "docstring": "Reduce with `func`, chunk by chunk, the passed pytable `array`." - }, - { - "code": "def execute(helper, config, args):\n env = parse_env_config(config, args.environment)\n option_settings = env.get('option_settings', {})\n settings = parse_option_settings(option_settings)\n for setting in settings:\n out(str(setting))", - "docstring": "dump command dumps things" - }, - { - "code": "def gotonext(self):\n while self.pos < len(self.field):\n if self.field[self.pos] in self.LWS + '\\n\\r':\n self.pos = self.pos + 1\n elif self.field[self.pos] == '(':\n self.commentlist.append(self.getcomment())\n else: break", - "docstring": "Parse up to the start of the next address." - }, - { - "code": "def refresh(self, accept=MEDIA_TYPE_TAXII_V20):\n self.refresh_information(accept)\n self.refresh_collections(accept)", - "docstring": "Update the API Root's information and list of Collections" - }, - { - "code": "def sha_github_file(cls, config, repo_file, repository_api, repository_branch):\n repo_file_sha = None\n cfg = config.get_conf()\n github_token = cfg['sortinghat']['identities_api_token']\n headers = {\"Authorization\": \"token \" + github_token}\n url_dir = repository_api + \"/git/trees/\" + repository_branch\n logger.debug(\"Gettting sha data from tree: %s\", url_dir)\n raw_repo_file_info = requests.get(url_dir, headers=headers)\n raw_repo_file_info.raise_for_status()\n for rfile in raw_repo_file_info.json()['tree']:\n if rfile['path'] == repo_file:\n logger.debug(\"SHA found: %s, \", rfile[\"sha\"])\n repo_file_sha = rfile[\"sha\"]\n break\n return repo_file_sha", - "docstring": "Return the GitHub SHA for a file in the repository" - }, - { - "code": "def start_watching(self):\n if self.watcher and self.watcher.is_alive():\n return\n self.watcher = Watcher()\n self.watcher.start()", - "docstring": "Begins watching etcd for changes." - }, - { - "code": "def replace(self, year=None, month=None, day=None):\n if year is None:\n year = self._year\n if month is None:\n month = self._month\n if day is None:\n day = self._day\n return date.__new__(type(self), year, month, day)", - "docstring": "Return a new date with new values for the specified fields." - }, - { - "code": "def _MakeParallelBenchmark(p, work_func, *args):\n def Benchmark(b):\n e = threading.Event()\n def Target():\n e.wait()\n for _ in xrange(b.N / p):\n work_func(*args)\n threads = []\n for _ in xrange(p):\n t = threading.Thread(target=Target)\n t.start()\n threads.append(t)\n b.ResetTimer()\n e.set()\n for t in threads:\n t.join()\n return Benchmark", - "docstring": "Create and return a benchmark that runs work_func p times in parallel." - }, - { - "code": "def parse_docstring(self, func_or_method: typing.Callable) -> dict:\n docstring = func_or_method.__doc__\n if not docstring:\n return {}\n docstring = docstring.split(\"---\")[-1]\n parsed = yaml.safe_load(docstring)\n if not isinstance(parsed, dict):\n return {}\n return parsed", - "docstring": "Given a function, parse the docstring as YAML and return a dictionary of info." 
- }, - { - "code": "def _generate(num_particles, D, box, rs):\n X0 = rs.rand(num_particles) * (box.x2 - box.x1) + box.x1\n Y0 = rs.rand(num_particles) * (box.y2 - box.y1) + box.y1\n Z0 = rs.rand(num_particles) * (box.z2 - box.z1) + box.z1\n return [Particle(D=D, x0=x0, y0=y0, z0=z0)\n for x0, y0, z0 in zip(X0, Y0, Z0)]", - "docstring": "Generate a list of `Particle` objects." - }, - { - "code": "def note_hz_to_midi(annotation):\n annotation.namespace = 'note_midi'\n data = annotation.pop_data()\n for obs in data:\n annotation.append(time=obs.time, duration=obs.duration,\n confidence=obs.confidence,\n value=12 * (np.log2(obs.value) - np.log2(440.0)) + 69)\n return annotation", - "docstring": "Convert a pitch_hz annotation to pitch_midi" - }, - { - "code": "def pitch_hz_to_contour(annotation):\n annotation.namespace = 'pitch_contour'\n data = annotation.pop_data()\n for obs in data:\n annotation.append(time=obs.time, duration=obs.duration,\n confidence=obs.confidence,\n value=dict(index=0,\n frequency=np.abs(obs.value),\n voiced=obs.value > 0))\n return annotation", - "docstring": "Convert a pitch_hz annotation to a contour" - }, - { - "code": "def data(self, X=None, y=None, sentences=None):\n self.X = X\n self.y = y\n self.sentences = sentences", - "docstring": "Add data to flow" - }, - { - "code": "def exec_file(self, path):\n filename = os.path.basename(path)\n log.info('Execute %s', filename)\n content = from_file(path).replace('\\r', '').split('\\n')\n res = '> '\n for line in content:\n line = line.rstrip('\\n')\n retlines = (res + self.__exchange(line)).splitlines()\n res = retlines.pop()\n for lin in retlines:\n log.info(lin)\n log.info(res)", - "docstring": "execute the lines in the local file 'path" - }, - { - "code": "def find_matching_files(self, includes):\n if len(includes) == 0: \n return [] \n files = [f['relativepath'] for f in self.package['resources']]\n includes = r'|'.join([fnmatch.translate(x) for x in includes])\n files = [f for f in files if re.match(includes, os.path.basename(f))] + \\\n [f for f in files if re.match(includes, f)]\n files = list(set(files))\n return files", - "docstring": "For various actions we need files that match patterns" - }, - { - "code": "def import_funcs(self, module):\n newcells = self._impl.new_cells_from_module(module)\n return get_interfaces(newcells)", - "docstring": "Create a cells from a module." - }, - { - "code": "def numeric_to_timedelta(self, numerics):\n if self.package == 'pandas':\n return [self.type(int(x*self.factor), units='ns')\n for x in numerics]\n else:\n return [self.type(seconds=x*self.factor)\n for x in numerics]", - "docstring": "Convert sequence of numerics to timedelta" - }, - { - "code": "def _wait_for_connection(self, port):\n connected = False\n max_tries = 10\n num_tries = 0\n wait_time = 0.5\n while not connected or num_tries >= max_tries:\n time.sleep(wait_time)\n try:\n af = socket.AF_INET\n addr = ('127.0.0.1', port)\n sock = socket.socket(af, socket.SOCK_STREAM)\n sock.connect(addr)\n except socket.error:\n if sock:\n sock.close()\n num_tries += 1\n continue\n connected = True\n if not connected:\n print(\"Error connecting to sphinx searchd\", file=sys.stderr)", - "docstring": "Wait until we can make a socket connection to sphinx." 
- }, - { - "code": "def resourcePath(self, relative_path):\n from os import path\n import sys\n try:\n base_path = sys._MEIPASS\n except Exception:\n base_path = path.dirname(path.abspath(__file__))\n return path.join(base_path, relative_path)", - "docstring": "Get absolute path to resource, works for dev and for PyInstaller" - }, - { - "code": "def validation_metrics(self):\n if (self._validation_iterator is None) or (self._validation_metrics is None):\n raise AttributeError('Validation is not setup.')\n n = 0.0\n metric_sums = [0.0] * len(self._validation_metrics)\n self._sess.run(self._validation_iterator.initializer)\n while True:\n try:\n metrics = self._sess.run(self._validation_metrics)\n for i, m in enumerate(metrics):\n metric_sums[i] += m\n n += 1.0\n except tf.errors.OutOfRangeError:\n break\n for i, m in enumerate(metric_sums):\n metric_sums[i] = metric_sums[i] / n\n return zip(self._validation_metrics, metric_sums)", - "docstring": "A helper function to compute validation related metrics" - }, - { - "code": "def embed_code_links(app, exception):\n if exception is not None:\n return\n if not app.builder.config.plot_gallery:\n return\n if app.builder.name not in ['html', 'readthedocs']:\n return\n print('Embedding documentation hyperlinks in examples..')\n gallery_conf = app.config.sphinx_gallery_conf\n gallery_dirs = gallery_conf['gallery_dirs']\n if not isinstance(gallery_dirs, list):\n gallery_dirs = [gallery_dirs]\n for gallery_dir in gallery_dirs:\n _embed_code_links(app, gallery_conf, gallery_dir)", - "docstring": "Embed hyperlinks to documentation into example code" - }, - { - "code": "def show_response_messages(response_json):\n message_type_kwargs = {\n 'warning': {'fg': 'yellow'},\n 'error': {'fg': 'red'},\n }\n for message in response_json.get('messages', []):\n click.secho(message['text'], **message_type_kwargs.get(message['type'], {}))", - "docstring": "Show all messages in the `messages` key of the given dict." - }, - { - "code": "def starts_with(self, prefix):\n if prefix is None:\n raise TypeError('given prefix arg must not be none')\n if isinstance(self.val, str_types):\n if not isinstance(prefix, str_types):\n raise TypeError('given prefix arg must be a string')\n if len(prefix) == 0:\n raise ValueError('given prefix arg must not be empty')\n if not self.val.startswith(prefix):\n self._err('Expected <%s> to start with <%s>, but did not.' % (self.val, prefix))\n elif isinstance(self.val, Iterable):\n if len(self.val) == 0:\n raise ValueError('val must not be empty')\n first = next(iter(self.val))\n if first != prefix:\n self._err('Expected %s to start with <%s>, but did not.' % (self.val, prefix))\n else:\n raise TypeError('val is not a string or iterable')\n return self", - "docstring": "Asserts that val is string or iterable and starts with prefix." - }, - { - "code": "def env_export(prefix, exported, env):\n for exp in exported:\n ENV[\"_\".join([prefix, exp])] = env[exp]", - "docstring": "Define the list of 'exported' variables with 'prefix' with values from 'env'" - }, - { - "code": "def _close(self):\n if self._state != \"closed\":\n self.event(DisconnectedEvent(self._dst_addr))\n self._set_state(\"closed\")\n if self._socket is None:\n return\n try:\n self._socket.shutdown(socket.SHUT_RDWR)\n except socket.error:\n pass\n self._socket.close()\n self._socket = None\n self._write_queue.clear()\n self._write_queue_cond.notify()", - "docstring": "Same as `_close` but expects `lock` acquired." 
- }, - { - "code": "def sum_out(var, factors, bn):\n \"Eliminate var from all factors by summing over its values.\"\n result, var_factors = [], []\n for f in factors:\n (var_factors if var in f.vars else result).append(f)\n result.append(pointwise_product(var_factors, bn).sum_out(var, bn))\n return result", - "docstring": "Eliminate var from all factors by summing over its values." - }, - { - "code": "def buttons(self):\n return [name for name, value in rController._buttons.items()\n if self.gamepad.wButtons & value == value]", - "docstring": "Returns a list of buttons currently pressed" - }, - { - "code": "def parse(config):\n if not isinstance(config, basestring):\n raise TypeError(\"Contains input must be a simple string\")\n validator = ContainsValidator()\n validator.contains_string = config\n return validator", - "docstring": "Parse a contains validator, which takes as the config a simple string to find" - }, - { - "code": "def md_dynamic_default_values_info(name, node):\n configurations = node.find(\"configurations\")\n if configurations is not None:\n configurations = []\n for n in node.findall(\"configuration\"):\n dimension = n.find(\"dimension\")\n dimension = dimension.text if dimension is not None else None\n policy = n.find(\"policy\")\n policy = policy.text if policy is not None else None\n defaultValueExpression = n.find(\"defaultValueExpression\")\n defaultValueExpression = defaultValueExpression.text if defaultValueExpression is not None else None\n configurations.append(DynamicDefaultValuesConfiguration(dimension, policy, defaultValueExpression))\n return DynamicDefaultValues(name, configurations)", - "docstring": "Extract metadata Dynamic Default Values from an xml node" - }, - { - "code": "def load(self):\r\n self.meta.resolved_path = self.find_data(self.meta.path)\r\n if not self.meta.resolved_path:\r\n raise ImproperlyConfigured(\"Data file '{}' not found\".format(self.meta.path))\r\n print(\"Loading:\", self.meta.path)\r\n with open(self.meta.resolved_path, 'r') as fd:\r\n return fd.read()", - "docstring": "Load a file in text mode" - }, - { - "code": "def _remove_closest_particle(self, p):\n dp = self.pos - p\n dist2 = (dp*dp).sum(axis=1)\n ind = dist2.argmin()\n rp = self.pos[ind].copy()\n self.pos = np.delete(self.pos, ind, axis=0)\n return rp", - "docstring": "removes the closest particle in self.pos to ``p``" - }, - { - "code": "def warning(msg):\n _flush()\n sys.stderr.write(\"\\033[1;7;33;40mWARNING: {}\\033[0m\\n\".format(msg))\n sys.stderr.flush()", - "docstring": "Emit a warning message." - }, - { - "code": "def storages(self):\n stores = self._json(self._get(self._storages_url), 200)\n stores = stores['data']\n for store in stores:\n yield Storage(store, self.session)", - "docstring": "Iterate over all storages for this projects." - }, - { - "code": "def cache_resolver(resolver, path):\n env = resolver.cache.find(path)\n if env:\n return env\n raise ResolveError", - "docstring": "Resolves VirtualEnvironments in EnvironmentCache" - }, - { - "code": "def _random_point(self):\n idx = np.random.randint(self.n_warmup,\n size=min(2, np.ceil(np.sqrt(self.n_warmup))))\n return self.warmup[idx, :].mean(axis=0)", - "docstring": "Find an approximately random point in the flux cone." 
- }, - { - "code": "def submit(course, tid=None, pastebin=False, review=False):\n if tid is not None:\n return submit_exercise(Exercise.byid(tid),\n pastebin=pastebin,\n request_review=review)\n else:\n sel = Exercise.get_selected()\n if not sel:\n raise NoExerciseSelected()\n return submit_exercise(sel, pastebin=pastebin, request_review=review)", - "docstring": "Submit the selected exercise to the server." - }, - { - "code": "def stdout(self):\n if self._streaming:\n stdout = []\n while not self.__stdout.empty():\n try:\n line = self.__stdout.get_nowait()\n stdout.append(line)\n except:\n pass\n else:\n stdout = self.__stdout\n return stdout", - "docstring": "Converts stdout string to a list." - }, - { - "code": "def extract_actions_from_class(record_class):\n for name in dir(record_class):\n method = getattr(record_class, name, None)\n if method and getattr(method, '__deposit_action__', False):\n yield method.__name__", - "docstring": "Extract actions from class." - }, - { - "code": "def add_organization_course(organization_data, course_key):\n _validate_course_key(course_key)\n _validate_organization_data(organization_data)\n data.create_organization_course(\n organization=organization_data,\n course_key=course_key\n )", - "docstring": "Adds a organization-course link to the system" - }, - { - "code": "def filter(self):\n self.content = [\n ch\n for ch in self.xml.getchildren()\n if get_bounds(ch).intersects(self.area)\n ]", - "docstring": "Filter the changesets that intersects with the geojson geometry." - }, - { - "code": "def escape_ID(cobra_model):\n for x in chain([cobra_model],\n cobra_model.metabolites,\n cobra_model.reactions,\n cobra_model.genes):\n x.id = _escape_str_id(x.id)\n cobra_model.repair()\n gene_renamer = _GeneEscaper()\n for rxn, rule in iteritems(get_compiled_gene_reaction_rules(cobra_model)):\n if rule is not None:\n rxn._gene_reaction_rule = ast2str(gene_renamer.visit(rule))", - "docstring": "makes all ids SBML compliant" - }, - { - "code": "def colors_no_palette(colors=None, **kwds):\n if isinstance(colors, str):\n colors = _split_colors(colors)\n else:\n colors = to_triplets(colors or ())\n colors = (color(c) for c in colors or ())\n return palette.Palette(colors, **kwds)", - "docstring": "Return a Palette but don't take into account Pallete Names." 
- }, - { - "code": "def clean(self):\n cleaned_data = super(EnterpriseCustomerReportingConfigAdminForm, self).clean()\n report_customer = cleaned_data.get('enterprise_customer')\n invalid_catalogs = [\n '{} ({})'.format(catalog.title, catalog.uuid)\n for catalog in cleaned_data.get('enterprise_customer_catalogs')\n if catalog.enterprise_customer != report_customer\n ]\n if invalid_catalogs:\n message = _(\n 'These catalogs for reporting do not match enterprise'\n 'customer {enterprise_customer}: {invalid_catalogs}',\n ).format(\n enterprise_customer=report_customer,\n invalid_catalogs=invalid_catalogs,\n )\n self.add_error('enterprise_customer_catalogs', message)", - "docstring": "Override of clean method to perform additional validation" - }, - { - "code": "def update(self, params, values):\n params = listify(params)\n values = listify(values)\n for i, p in enumerate(params):\n if (p[-2:] == '-a') and (values[i] < 0):\n values[i] = 0.0\n super(PlatonicSpheresCollection, self).update(params, values)", - "docstring": "Calls an update, but clips radii to be > 0" - }, - { - "code": "def redirect_to_env_paths(path):\n with open(path, 'r') as f:\n redirected = f.read()\n return shlex.split(redirected)", - "docstring": "Get environment path from redirect file" - }, - { - "code": "def run(self):\n\t\tfilename = \".DS_Store\"\n\t\tcommand = \"find {path} -type f -name \\\"{filename}\\\" \".format(path = self.path, filename = filename)\n\t\tcmd = CommandHelper(command)\n\t\tcmd.execute()\n\t\tfiles = cmd.output.split(\"\\n\")\n\t\tfor f in files:\n\t\t\tif not f.endswith(filename):\n\t\t\t\tcontinue\n\t\t\trel_path = f.replace(self.path, \"\")\n\t\t\tif rel_path.startswith(tuple(self.CONFIG['exclude_paths'])):\n\t\t\t\tcontinue\n\t\t\tissue = Issue()\n\t\t\tissue.name = \"File .DS_Store detected\"\n\t\t\tissue.potential = False\n\t\t\tissue.severity = Issue.SEVERITY_LOW\n\t\t\tissue.file = rel_path\n\t\t\tself.saveIssue(issue)", - "docstring": "Finds .DS_Store files into path" - }, - { - "code": "def _str_replacement(self, target, replacement):\n self.data = self.data.replace(target, replacement)", - "docstring": "Replace target with replacement" - }, - { - "code": "def release(self):\n if self.value is not None:\n self.value += 1\n if self.value > self.maximum_value:\n raise ValueError(\"Too many releases\")", - "docstring": "Release, incrementing the internal counter by one." - }, - { - "code": "def availableBranches(self):\n return [\n GithubComponentVersion(\n '', b[0], b[1], self.name, cache_key=None\n ) for b in _getBranchHeads(self.repo).items()\n ]", - "docstring": "return a list of GithubComponentVersion objects for the tip of each branch" - }, - { - "code": "def deploy_version(self, environment_name, version_label):\n out(\"Deploying \" + str(version_label) + \" to \" + str(environment_name))\n self.ebs.update_environment(environment_name=environment_name, version_label=version_label)", - "docstring": "Deploys a version to an environment" - }, - { - "code": "def write_value(self, value, write_type=0):\n data = NSData.dataWithBytes_length_(value, len(value))\n self._device._peripheral.writeValue_forCharacteristic_type_(data,\n self._characteristic,\n write_type)", - "docstring": "Write the specified value to this characteristic." - }, - { - "code": "def list(self, ignore_patterns):\n for prefix, root in self.locations:\n storage = self.storages[root]\n for path in utils.get_files(storage, ignore_patterns):\n yield path, storage", - "docstring": "List all files in all locations." 
- }, - { - "code": "def arkt_to_unixt(ark_timestamp):\n res = datetime.datetime(2017, 3, 21, 15, 55, 44) + datetime.timedelta(seconds=ark_timestamp)\n return res.timestamp()", - "docstring": "convert ark timestamp to unix timestamp" - }, - { - "code": "def record_delete_subfield(rec, tag, subfield_code, ind1=' ', ind2=' '):\n ind1, ind2 = _wash_indicators(ind1, ind2)\n for field in rec.get(tag, []):\n if field[1] == ind1 and field[2] == ind2:\n field[0][:] = [subfield for subfield in field[0]\n if subfield_code != subfield[0]]", - "docstring": "Delete all subfields with subfield_code in the record." - }, - { - "code": "def sameSegment(seg1, seg2):\n result = True\n for field in [1, 2, 3, 4, 5, 6]:\n if abs(seg1[0][field] - seg2[0][field]) > 0.001:\n result = False\n if len(seg1[1:]) != len(seg2[1:]):\n result = False\n for syn in seg2[1:]:\n if syn[2] <= 0:\n print \"A synapse with zero permanence encountered\"\n result = False\n if result == True:\n for syn in seg1[1:]:\n if syn[2] <= 0:\n print \"A synapse with zero permanence encountered\"\n result = False\n res = sameSynapse(syn, seg2[1:])\n if res == False:\n result = False\n return result", - "docstring": "Return True if seg1 and seg2 are identical, ignoring order of synapses" - }, - { - "code": "def connect(self):\n if not self.connected():\n self._ws = create_connection(self.WS_URI)\n message = {\n 'type':self.WS_TYPE,\n 'product_id':self.WS_PRODUCT_ID\n }\n self._ws.send(dumps(message))\n with self._lock:\n if not self._thread:\n thread = Thread(target=self._keep_alive_thread, args=[])\n thread.start()", - "docstring": "Connects and subscribes to the WebSocket Feed." - }, - { - "code": "def child(self, offset256):\n a = bytes(self) + offset256\n s = hashlib.sha256(a).digest()\n return self.add(s)", - "docstring": "Derive new public key from this key and a sha256 \"offset\"" - }, - { - "code": "def find_matches(strings, words, length_hoped):\n lower_words = [w.lower() for w in words]\n def has_match(string):\n lower_string = string.lower()\n for test_word in lower_words:\n if test_word in lower_string:\n return True\n return False\n shortened_strings = [textwrap.wrap(s) for s in strings]\n short_string_list = list(chain.from_iterable(shortened_strings))\n matches = [ms for ms in short_string_list if has_match(ms)]\n cumulative_len = 0\n break_at = None\n for idx, match in enumerate(matches):\n cumulative_len += len(match)\n if cumulative_len >= length_hoped:\n break_at = idx\n break\n return matches[0:break_at]", - "docstring": "Used by default property excerpt" - }, - { - "code": "def settings(self, **kwargs):\n for k, v in kwargs.items():\n setattr(self, k, v)", - "docstring": "Pass a load of settings into the canvas" - }, - { - "code": "def WorkerAgentGenerator(agent_class):\n if isinstance(agent_class, str):\n agent_class = AgentsDictionary.get(agent_class)\n if not agent_class and agent_class.find('.') != -1:\n module_name, function_name = agent_class.rsplit('.', 1)\n module = importlib.import_module(module_name)\n agent_class = getattr(module, function_name)\n class WorkerAgent(agent_class):\n def __init__(self, model=None, **kwargs):\n self.model = model\n if not issubclass(agent_class, LearningAgent):\n kwargs.pop(\"network\")\n super(WorkerAgent, self).__init__(**kwargs)\n def initialize_model(self):\n return self.model\n return WorkerAgent", - "docstring": "Worker Agent generator, receives an Agent class and creates a Worker Agent class that inherits from that Agent." 
- }, - { - "code": "def sf01(arr):\n s = arr.shape\n return arr.swapaxes(0, 1).reshape(s[0] * s[1], *s[2:])", - "docstring": "swap and then flatten axes 0 and 1" - }, - { - "code": "def exploit_single(self, ip, operating_system):\n result = None\n if \"Windows Server 2008\" in operating_system or \"Windows 7\" in operating_system:\n result = subprocess.run(['python2', os.path.join(self.datadir, 'MS17-010', 'eternalblue_exploit7.py'), str(ip), os.path.join(self.datadir, 'final_combined.bin'), \"12\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n elif \"Windows Server 2012\" in operating_system or \"Windows 10\" in operating_system or \"Windows 8.1\" in operating_system:\n result = subprocess.run(['python2', os.path.join(self.datadir, 'MS17-010', 'eternalblue_exploit8.py'), str(ip), os.path.join(self.datadir, 'final_combined.bin'), \"12\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n else:\n return [\"System target could not be automatically identified\"]\n return result.stdout.decode('utf-8').split('\\n')", - "docstring": "Exploits a single ip, exploit is based on the given operating system." - }, - { - "code": "def id(self):\n sigs = self.data[\"signatures\"]\n self.data.pop(\"signatures\", None)\n h = hashlib.sha256(bytes(self)).digest()\n self.data[\"signatures\"] = sigs\n return hexlify(h[:20]).decode(\"ascii\")", - "docstring": "The transaction id of this transaction" - }, - { - "code": "def parse(s):\n try:\n m = _regex.match(s)\n t = Tag(int(m.group('major')),\n int(m.group('minor')),\n int(m.group('patch')))\n return t \\\n if m.group('label') is None \\\n else t.with_revision(m.group('label'), int(m.group('number')))\n except AttributeError:\n return None", - "docstring": "Parses a string into a Tag" - }, - { - "code": "def line(self, p1, p2, resolution=1):\n xdiff = max(p1.x, p2.x) - min(p1.x, p2.x)\n ydiff = max(p1.y, p2.y) - min(p1.y, p2.y)\n xdir = [-1, 1][int(p1.x <= p2.x)]\n ydir = [-1, 1][int(p1.y <= p2.y)]\n r = int(round(max(xdiff, ydiff)))\n if r == 0:\n return\n for i in range((r + 1) * resolution):\n x = p1.x\n y = p1.y\n if xdiff:\n x += (float(i) * xdiff) / r * xdir / resolution\n if ydiff:\n y += (float(i) * ydiff) / r * ydir / resolution\n yield Point((x, y))", - "docstring": "Resolve the points to make a line between two points." - }, - { - "code": "def start_heron_tools(masters, cl_args):\n single_master = list(masters)[0]\n wait_for_master_to_start(single_master)\n cmd = \"%s run %s >> /tmp/heron_tools_start.log 2>&1 &\" \\\n % (get_nomad_path(cl_args), get_heron_tools_job_file(cl_args))\n Log.info(\"Starting Heron Tools on %s\" % single_master)\n if not is_self(single_master):\n cmd = ssh_remote_execute(cmd, single_master, cl_args)\n Log.debug(cmd)\n pid = subprocess.Popen(cmd,\n shell=True,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n return_code = pid.wait()\n output = pid.communicate()\n Log.debug(\"return code: %s output: %s\" % (return_code, output))\n if return_code != 0:\n Log.error(\"Failed to start Heron Tools on %s with error:\\n%s\" % (single_master, output[1]))\n sys.exit(-1)\n wait_for_job_to_start(single_master, \"heron-tools\")\n Log.info(\"Done starting Heron Tools\")", - "docstring": "Start Heron tracker and UI" - }, - { - "code": "def list_services(self):\n my_services = []\n for service in self.name_index.values():\n my_services.append(Service(service))\n return my_services", - "docstring": "Lists all services in memory storage." 
- }, - { - "code": "def _swap_mode(self):\n assert self.mode in (cs.CS_MODE_ARM, cs.CS_MODE_THUMB)\n if self.mode == cs.CS_MODE_ARM:\n self.mode = cs.CS_MODE_THUMB\n else:\n self.mode = cs.CS_MODE_ARM", - "docstring": "Toggle between ARM and Thumb mode" - }, - { - "code": "def ancestors(self):\n results = []\n def recursive_find_ancestors(task, stack):\n for input in task.inputs:\n if input not in stack:\n stack.append(input)\n recursive_find_ancestors(input, stack)\n recursive_find_ancestors(self, results)\n return results", - "docstring": "Returns list of ancestor task specs based on inputs" - }, - { - "code": "def boilerplate(name, contact, description, pmids, version, copyright, authors, licenses, disclaimer, output):\n from .document_utils import write_boilerplate\n write_boilerplate(\n name=name,\n version=version,\n description=description,\n authors=authors,\n contact=contact,\n copyright=copyright,\n licenses=licenses,\n disclaimer=disclaimer,\n pmids=pmids,\n file=output,\n )", - "docstring": "Build a template BEL document with the given PubMed identifiers." - }, - { - "code": "def mem_size(self):\n data_len = self._data_mem_size\n node_count = len(list(self.xml_doc.iter(tag=etree.Element)))\n if self.compressed:\n size = 52 * node_count + data_len + 630\n else:\n tags_len = 0\n for e in self.xml_doc.iter(tag=etree.Element):\n e_len = max(len(e.tag), 8)\n e_len = (e_len + 3) & ~3\n tags_len += e_len\n size = 56 * node_count + data_len + 630 + tags_len\n return (size + 8) & ~7", - "docstring": "used when allocating memory ingame" - }, - { - "code": "def _apply_skips(self, i, r,\n summarize=False,\n report_unexpected_exceptions=True,\n context=None):\n for skip in self._skips:\n try:\n result = skip(r)\n if result is True:\n yield True\n except Exception as e:\n if report_unexpected_exceptions:\n p = {'code': UNEXPECTED_EXCEPTION}\n if not summarize:\n p['message'] = MESSAGES[UNEXPECTED_EXCEPTION] % (e.__class__.__name__, e)\n p['row'] = i + 1\n p['record'] = r\n p['exception'] = e\n p['function'] = '%s: %s' % (skip.__name__,\n skip.__doc__)\n if context is not None: p['context'] = context\n yield p", - "docstring": "Apply skip functions on `r`." - }, - { - "code": "def seeded_auth_token(client, service, seed):\n hash_func = hashlib.md5()\n token = ','.join((client, service, seed)).encode('utf-8')\n hash_func.update(token)\n return hash_func.hexdigest()", - "docstring": "Return an auth token based on the client+service+seed tuple." - }, - { - "code": "def _assert_all_finite(X):\n X = np.asanyarray(X)\n if (X.dtype.char in np.typecodes['AllFloat'] and\n not np.isfinite(X.sum()) and not np.isfinite(X).all()):\n raise ValueError(\"Input contains NaN, infinity\"\n \" or a value too large for %r.\" % X.dtype)", - "docstring": "Like assert_all_finite, but only for ndarray." 
- }, - { - "code": "def declarations(self):\n declarations = GetDeclarations()\n for a in self.constraints:\n try:\n declarations.visit(a)\n except RuntimeError:\n if sys.getrecursionlimit() >= PickleSerializer.MAX_RECURSION:\n raise Exception(f'declarations recursion limit surpassed {PickleSerializer.MAX_RECURSION}, aborting')\n new_limit = sys.getrecursionlimit() + PickleSerializer.DEFAULT_RECURSION\n if new_limit <= PickleSerializer.DEFAULT_RECURSION:\n sys.setrecursionlimit(new_limit)\n return self.declarations\n return declarations.result", - "docstring": "Returns the variable expressions of this constraint set" - }, - { - "code": "def html_to_text(cls, html):\n s = cls()\n s.feed(html)\n unescaped_data = s.unescape(s.get_data())\n return escape_for_xml(unescaped_data, tags_to_keep=s.mathml_elements)", - "docstring": "Return stripped HTML, keeping only MathML." - }, - { - "code": "def Search(pattern, s):\n if pattern not in _regexp_compile_cache:\n _regexp_compile_cache[pattern] = sre_compile.compile(pattern)\n return _regexp_compile_cache[pattern].search(s)", - "docstring": "Searches the string for the pattern, caching the compiled regexp." - }, - { - "code": "def getpassword(prompt=\"Password: \"):\n fd = sys.stdin.fileno()\n old = termios.tcgetattr(fd)\n new = termios.tcgetattr(fd)\n new[3] &= ~termios.ECHO\n try:\n termios.tcsetattr(fd, termios.TCSADRAIN, new)\n passwd = raw_input(prompt)\n finally:\n termios.tcsetattr(fd, termios.TCSADRAIN, old)\n return passwd", - "docstring": "get user input without echo" - }, - { - "code": "def fmt(lbaf=3):\n if env():\n cij.err(\"cij.nvme.exists: Invalid NVMe ENV.\")\n return 1\n nvme = cij.env_to_dict(PREFIX, EXPORTED + REQUIRED)\n cmd = [\"nvme\", \"format\", nvme[\"DEV_PATH\"], \"-l\", str(lbaf)]\n rcode, _, _ = cij.ssh.command(cmd, shell=True)\n return rcode", - "docstring": "Do format for NVMe device" - }, - { - "code": "def get(self, name):\n for c in self.comps:\n if c.category == name:\n return c\n return None", - "docstring": "Return component by category name" - }, - { - "code": "def msgDict(d,matching=None,sep1=\"=\",sep2=\"\\n\",sort=True,cantEndWith=None):\n msg=\"\"\n if \"record\" in str(type(d)):\n keys=d.dtype.names\n else:\n keys=d.keys()\n if sort:\n keys=sorted(keys)\n for key in keys:\n if key[0]==\"_\":\n continue\n if matching:\n if not key in matching:\n continue\n if cantEndWith and key[-len(cantEndWith)]==cantEndWith:\n continue\n if 'float' in str(type(d[key])):\n s=\"%.02f\"%d[key]\n else:\n s=str(d[key])\n if \"object\" in s:\n s=''\n msg+=key+sep1+s+sep2\n return msg.strip()", - "docstring": "convert a dictionary to a pretty formatted string." 
- }, - { - "code": "def create_rcontext(self, size, frame):\n if self.format == 'pdf':\n surface = cairo.PDFSurface(self._output_file(frame), *size)\n elif self.format in ('ps', 'eps'):\n surface = cairo.PSSurface(self._output_file(frame), *size)\n elif self.format == 'svg':\n surface = cairo.SVGSurface(self._output_file(frame), *size)\n elif self.format == 'surface':\n surface = self.target\n else:\n surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, *size)\n return cairo.Context(surface)", - "docstring": "Called when CairoCanvas needs a cairo context to draw on" - }, - { - "code": "def encode(self, obj):\n def hint_tuples(item):\n if isinstance(item, tuple):\n return {'__tuple__': True, 'items': item}\n if isinstance(item, list):\n return [hint_tuples(e) for e in item]\n if isinstance(item, dict):\n return {\n key: hint_tuples(val) for key, val in item.iteritems()\n }\n else:\n return item\n return super(Encoder, self).encode(hint_tuples(obj))", - "docstring": "function to encode json string" - }, - { - "code": "def contains_value(self, *values):\n self._check_dict_like(self.val, check_getitem=False)\n if len(values) == 0:\n raise ValueError('one or more value args must be given')\n missing = []\n for v in values:\n if v not in self.val.values():\n missing.append(v)\n if missing:\n self._err('Expected <%s> to contain values %s, but did not contain %s.' % (self.val, self._fmt_items(values), self._fmt_items(missing)))\n return self", - "docstring": "Asserts that val is a dict and contains the given value or values." - }, - { - "code": "def locked_delete(self):\n if self._cache:\n self._cache.delete(self._key_name)\n self._delete_entity()", - "docstring": "Delete Credential from datastore." - }, - { - "code": "def _on_watermark_notification(self, notif):\n if self.get_user(notif.user_id).is_self:\n logger.info('latest_read_timestamp for {} updated to {}'\n .format(self.id_, notif.read_timestamp))\n self_conversation_state = (\n self._conversation.self_conversation_state\n )\n self_conversation_state.self_read_state.latest_read_timestamp = (\n parsers.to_timestamp(notif.read_timestamp)\n )\n previous_timestamp = self._watermarks.get(\n notif.user_id,\n datetime.datetime.min.replace(tzinfo=datetime.timezone.utc)\n )\n if notif.read_timestamp > previous_timestamp:\n logger.info(('latest_read_timestamp for conv {} participant {}' +\n ' updated to {}').format(self.id_,\n notif.user_id.chat_id,\n notif.read_timestamp))\n self._watermarks[notif.user_id] = notif.read_timestamp", - "docstring": "Handle a watermark notification." - }, - { - "code": "def next_item(self):\n queue = self.queue\n try:\n item = queue.get(block=True, timeout=5)\n return item\n except Exception:\n return None", - "docstring": "Get a single item from the queue." - }, - { - "code": "def create_query(section):\n query = {}\n if 'ports' in section:\n query['ports'] = [section['ports']]\n if 'up' in section:\n query['up'] = bool(section['up'])\n if 'search' in section:\n query['search'] = [section['search']]\n if 'tags' in section:\n query['tags'] = [section['tags']]\n if 'groups' in section:\n query['groups'] = [section['groups']]\n return query", - "docstring": "Creates a search query based on the section of the config file." 
- }, - { - "code": "def find_chunk (phrase, np):\n for i in iter(range(0, len(phrase))):\n parsed_np = find_chunk_sub(phrase, np, i)\n if parsed_np:\n return parsed_np", - "docstring": "leverage noun phrase chunking" - }, - { - "code": "def _replace(_self, **kwds):\n 'Return a new SplitResult object replacing specified fields with new values'\n result = _self._make(map(kwds.pop, ('scheme', 'netloc', 'path', 'query', 'fragment'), _self))\n if kwds:\n raise ValueError('Got unexpected field names: %r' % kwds.keys())\n return result", - "docstring": "Return a new SplitResult object replacing specified fields with new values" - }, - { - "code": "def fluent(func):\n @wraps(func)\n def fluent_interface(instance, *args, **kwargs):\n ret = func(instance, *args, **kwargs)\n if ret is not None:\n return ret\n return instance\n return fluent_interface", - "docstring": "Fluent interface decorator to return self if method return None." - }, - { - "code": "def addLogbooks(self, type=None, logs=[], default=\"\"):\n if type is not None and len(logs) != 0:\n if type in self.logList:\n for logbook in logs:\n if logbook not in self.logList.get(type)[0]:\n self.logList.get(type)[0].append(logbook)\n else:\n self.logList[type] = []\n self.logList[type].append(logs)\n if len(self.logList[type]) > 1 and default != \"\":\n self.logList.get(type)[1] == default\n else:\n self.logList.get(type).append(default)\n self.logType.clear()\n self.logType.addItems(list(self.logList.keys()))\n self.changeLogType()", - "docstring": "Add or change list of logbooks." - }, - { - "code": "def SWAP(self, *operands):\n a = operands[0]\n b = operands[-1]\n return (b,) + operands[1:-1] + (a,)", - "docstring": "Exchange 1st and 2nd stack items" - }, - { - "code": "def delete(self, key_name):\n self._assert_valid_stash()\n if key_name == 'stored_passphrase':\n raise GhostError(\n '`stored_passphrase` is a reserved ghost key name '\n 'which cannot be deleted')\n if not self.get(key_name):\n raise GhostError('Key `{0}` not found'.format(key_name))\n key = self._storage.get(key_name)\n if key.get('lock'):\n raise GhostError(\n 'Key `{0}` is locked and therefore cannot be deleted '\n 'Please unlock the key and try again'.format(key_name))\n deleted = self._storage.delete(key_name)\n audit(\n storage=self._storage.db_path,\n action='DELETE',\n message=json.dumps(dict(key_name=key_name)))\n if not deleted:\n raise GhostError('Failed to delete {0}'.format(key_name))", - "docstring": "Delete a key if it exists." - }, - { - "code": "def NeuralNetLearner(dataset, sizes):\n activations = map(lambda n: [0.0 for i in range(n)], sizes)\n weights = []\n def predict(example):\n unimplemented()\n return predict", - "docstring": "Layered feed-forward network." - }, - { - "code": "def all_comments(self):\n ctype = ContentType.objects.get(app_label__exact=\"happenings\", model__exact='event')\n update_ctype = ContentType.objects.get(app_label__exact=\"happenings\", model__exact='update')\n update_ids = self.update_set.values_list('id', flat=True)\n return Comment.objects.filter(\n Q(content_type=ctype.id, object_pk=self.id) |\n Q(content_type=update_ctype.id, object_pk__in=update_ids)\n )", - "docstring": "Returns combined list of event and update comments." 
- }, - { - "code": "def already_downloaded(filename):\n cur_file = os.path.join(c.bview_dir, filename)\n old_file = os.path.join(c.bview_dir, 'old', filename)\n if not os.path.exists(cur_file) and not os.path.exists(old_file):\n return False\n return True", - "docstring": "Verify that the file has not already been downloaded." - }, - { - "code": "def draw_triangle(setter, x0, y0, x1, y1, x2, y2, color=None, aa=False):\n draw_line(setter, x0, y0, x1, y1, color, aa)\n draw_line(setter, x1, y1, x2, y2, color, aa)\n draw_line(setter, x2, y2, x0, y0, color, aa)", - "docstring": "Draw triangle with points x0,y0 - x1,y1 - x2,y2" - }, - { - "code": "def age(self, id):\n path = self.hash(id)\n if os.path.exists(path):\n modified = datetime.datetime.fromtimestamp(os.stat(path)[8])\n age = datetime.datetime.today() - modified\n return age.days\n else:\n return 0", - "docstring": "Returns the age of the cache entry, in days." - }, - { - "code": "def foex(a, b):\n return (np.sum(a > b, dtype=float) / len(a) - 0.5) * 100", - "docstring": "Returns the factor of exceedance" - }, - { - "code": "def legal_ogrn():\n ogrn = \"\".join(map(str, [random.randint(1, 9) for _ in range(12)]))\n ogrn += str((int(ogrn) % 11 % 10))\n return ogrn", - "docstring": "Return a random government registration ID for a company." - }, - { - "code": "def exists():\n if env():\n cij.err(\"cij.nvm.exists: Invalid NVMe ENV.\")\n return 1\n nvm = cij.env_to_dict(PREFIX, EXPORTED + REQUIRED)\n cmd = ['[[ -b \"%s\" ]]' % nvm[\"DEV_PATH\"]]\n rcode, _, _ = cij.ssh.command(cmd, shell=True, echo=False)\n return rcode", - "docstring": "Verify that the ENV defined NVMe device exists" - }, - { - "code": "def fix_hypenation (foo):\n i = 0\n bar = []\n while i < len(foo):\n text, lemma, pos, tag = foo[i]\n if (tag == \"HYPH\") and (i > 0) and (i < len(foo) - 1):\n prev_tok = bar[-1]\n next_tok = foo[i + 1]\n prev_tok[0] += \"-\" + next_tok[0]\n prev_tok[1] += \"-\" + next_tok[1]\n bar[-1] = prev_tok\n i += 2\n else:\n bar.append(foo[i])\n i += 1\n return bar", - "docstring": "fix hyphenation in the word list for a parsed sentence" - }, - { - "code": "def upload(self):\n success = False\n batch = self.next()\n if len(batch) == 0:\n return False\n try:\n self.request(batch)\n success = True\n except Exception as e:\n self.log.error('error uploading: %s', e)\n success = False\n if self.on_error:\n self.on_error(e, batch)\n finally:\n for item in batch:\n self.queue.task_done()\n return success", - "docstring": "Upload the next batch of items, return whether successful." - }, - { - "code": "def update(self):\n bulbs = self._hub.get_lights()\n if not bulbs:\n _LOGGER.debug(\"%s is offline, send command failed\", self._zid)\n self._online = False", - "docstring": "Update light objects to their current values." - }, - { - "code": "def startup(self, app):\n self.database.init_async(app.loop)\n if not self.cfg.connection_manual:\n app.middlewares.insert(0, self._middleware)", - "docstring": "Register connection's middleware and prepare self database." - }, - { - "code": "def parse(version):\n match = _REGEX.match(version)\n if match is None:\n raise ValueError('%s is not valid SemVer string' % version)\n verinfo = match.groupdict()\n verinfo['major'] = int(verinfo['major'])\n verinfo['minor'] = int(verinfo['minor'])\n verinfo['patch'] = int(verinfo['patch'])\n return verinfo", - "docstring": "Parse version to major, minor, patch, pre-release, build parts." 
- }, - { - "code": "def _sm_stop_from_no_pain(self, *args, **kwargs):\n _logger.info(\"Stopping chaos for blockade %s\" % self._blockade_name)\n self._timer.cancel()", - "docstring": "Stop chaos when there is no current blockade operation" - }, - { - "code": "def not_followed_by(parser):\n @tri\n def not_followed_by_block():\n failed = object()\n result = optional(tri(parser), failed)\n if result != failed:\n fail([\"not \" + _fun_to_str(parser)])\n choice(not_followed_by_block)", - "docstring": "Succeeds if the given parser cannot consume input" - }, - { - "code": "def minmax(self, constraints, x, iters=10000):\n if issymbolic(x):\n m = self.min(constraints, x, iters)\n M = self.max(constraints, x, iters)\n return m, M\n else:\n return x, x", - "docstring": "Returns the min and max possible values for x within given constraints" - }, - { - "code": "def list_things_at(self, location, tclass=Thing):\n \"Return all things exactly at a given location.\"\n return [thing for thing in self.things\n if thing.location == location and isinstance(thing, tclass)]", - "docstring": "Return all things exactly at a given location." - }, - { - "code": "def update(self, list_id, segment_id, data):\n return self._mc_client._patch(url=self._build_path(list_id, 'segments', segment_id), data=data)", - "docstring": "updates an existing list segment." - }, - { - "code": "def initialize_boto_client(self):\n self.session = self.create_session()\n self.client = self.session.client('ec2')\n self.ec2 = self.session.resource('ec2')\n self.instances = []\n self.instance_states = {}\n self.vpc_id = 0\n self.sg_id = 0\n self.sn_ids = []", - "docstring": "Initialize the boto client." - }, - { - "code": "def handle_copy(self, dest_path, depth_infinity):\n if \"/by_tag/\" not in dest_path:\n raise DAVError(HTTP_FORBIDDEN)\n catType, tag, _rest = util.save_split(dest_path.strip(\"/\"), \"/\", 2)\n assert catType == \"by_tag\"\n if tag not in self.data[\"tags\"]:\n self.data[\"tags\"].append(tag)\n return True", - "docstring": "Change semantic of COPY to add resource tags." - }, - { - "code": "def getActiveKeyForAccount(self, name):\n account = self.rpc.get_account(name)\n for authority in account[\"active\"][\"key_auths\"]:\n try:\n return self.getPrivateKeyForPublicKey(authority[0])\n except Exception:\n pass\n return False", - "docstring": "Obtain owner Active Key for an account from the wallet database" - }, - { - "code": "def cmd_destroy(opts):\n config = load_config(opts.config)\n b = get_blockade(config, opts)\n b.destroy()", - "docstring": "Destroy all containers and restore networks" - }, - { - "code": "def _convert_suffix_to_docker_chars(suffix):\n accepted_characters = string.ascii_letters + string.digits + '_.-'\n def label_char_transform(char):\n if char in accepted_characters:\n return char\n return '-'\n return ''.join(label_char_transform(c) for c in suffix)", - "docstring": "Rewrite string so that all characters are valid in a docker name suffix." - }, - { - "code": "def SyntheticRestaurant(n=20):\n \"Generate a DataSet with n examples.\"\n def gen():\n example = map(random.choice, restaurant.values)\n example[restaurant.target] = Fig[18,2](example)\n return example\n return RestaurantDataSet([gen() for i in range(n)])", - "docstring": "Generate a DataSet with n examples." 
- }, - { - "code": "def LJMP(cpu, cs_selector, target):\n logger.info(\"LJMP: Jumping to: %r:%r\", cs_selector.read(), target.read())\n cpu.CS = cs_selector.read()\n cpu.PC = target.read()", - "docstring": "We are just going to ignore the CS selector for now." - }, - { - "code": "def _delete_key_internal(self, *args, **kwargs):\n mimicdb.backend.srem(tpl.bucket % self.name, args[0])\n mimicdb.backend.delete(tpl.key % (self.name, args[0]))\n return super(Bucket, self)._delete_key_internal(*args, **kwargs)", - "docstring": "Remove key name from bucket set." - }, - { - "code": "def query(self, input = '', params = {}):\n payload = {'input': input,\n 'appid': self.appid}\n for key, value in params.items():\n if isinstance(value, (list, tuple)):\n payload[key] = ','.join(value)\n else:\n payload[key] = value\n try:\n r = requests.get(\"http://api.wolframalpha.com/v2/query\", params=payload)\n if r.status_code != 200:\n raise Exception('Invalid response status code: %s' % (r.status_code))\n if r.encoding != 'utf-8':\n raise Exception('Invalid encoding: %s' % (r.encoding))\n except Exception, e:\n return Result(error = e)\n return Result(xml = r.text)", - "docstring": "Query Wolfram Alpha and return a Result object" - }, - { - "code": "def _move(self, index, new_priority):\n item, old_priority = self._memory[index]\n old_priority = old_priority or 0\n self._memory[index] = _SumRow(item, new_priority)\n self._update_internal_nodes(index, new_priority - old_priority)", - "docstring": "Change the priority of a leaf node." - }, - { - "code": "def _aggr_mode(inList):\n valueCounts = dict()\n nonNone = 0\n for elem in inList:\n if elem == SENTINEL_VALUE_FOR_MISSING_DATA:\n continue\n nonNone += 1\n if elem in valueCounts:\n valueCounts[elem] += 1\n else:\n valueCounts[elem] = 1\n if nonNone == 0:\n return None\n sortedCounts = valueCounts.items()\n sortedCounts.sort(cmp=lambda x,y: x[1] - y[1], reverse=True)\n return sortedCounts[0][0]", - "docstring": "Returns most common value seen in the non-None elements of the list" - }, - { - "code": "def remove(self, id, operator=\"=\", key=None):\n if key == None: key = self._key\n try: id = unicode(id)\n except: pass \n sql = \"delete from \"+self._name+\" where \"+key+\" \"+operator+\" ?\"\n self._db._cur.execute(sql, (id,))", - "docstring": "Deletes the row with given id." - }, - { - "code": "def _save(self):\n fulldict = copy.deepcopy(self.__dict__)\n for i, j in fulldict.items():\n if isinstance(j, Params):\n fulldict[i] = j.__dict__\n fulldumps = json.dumps(fulldict,\n sort_keys=False, \n indent=4, \n separators=(\",\", \":\"),\n )\n assemblypath = os.path.join(self.dirs, self.name+\".tet.json\")\n if not os.path.exists(self.dirs):\n os.mkdir(self.dirs)\n done = 0\n while not done:\n try:\n with open(assemblypath, 'w') as jout:\n jout.write(fulldumps)\n done = 1\n except (KeyboardInterrupt, SystemExit): \n print('.')\n continue", - "docstring": "save a JSON file representation of Tetrad Class for checkpoint" - }, - { - "code": "def print_state(self):\n def tile_string(value):\n if value > 0:\n return '% 5d' % (2 ** value,)\n return \" \"\n separator_line = '-' * 25\n print(separator_line)\n for row in range(4):\n print(\"|\" + \"|\".join([tile_string(v) for v in self._state[row, :]]) + \"|\")\n print(separator_line)", - "docstring": "Prints the current state." 
- }, - { - "code": "def _gethostbyname(self, hostname):\n if self._databaseType in const.IPV6_EDITIONS:\n response = socket.getaddrinfo(hostname, 0, socket.AF_INET6)\n family, socktype, proto, canonname, sockaddr = response[0]\n address, port, flow, scope = sockaddr\n return address\n else:\n return socket.gethostbyname(hostname)", - "docstring": "Hostname lookup method, supports both IPv4 and IPv6." - }, - { - "code": "def transform_title(self, content_metadata_item):\n title_with_locales = []\n for locale in self.enterprise_configuration.get_locales():\n title_with_locales.append({\n 'locale': locale,\n 'value': content_metadata_item.get('title', '')\n })\n return title_with_locales", - "docstring": "Return the title of the content item." - }, - { - "code": "def _calculate_distance(latlon1, latlon2):\n lat1, lon1 = latlon1\n lat2, lon2 = latlon2\n dlon = lon2 - lon1\n dlat = lat2 - lat1\n R = 6371\n a = np.sin(dlat / 2)**2 + np.cos(lat1) * np.cos(lat2) * (np.sin(dlon / 2))**2\n c = 2 * np.pi * R * np.arctan2(np.sqrt(a), np.sqrt(1 - a)) / 180\n return c", - "docstring": "Calculates the distance between two points on earth." - }, - { - "code": "def FromJsonString(self, value):\n self.Clear()\n for path in value.split(','):\n self.paths.append(path)", - "docstring": "Converts string to FieldMask according to proto3 JSON spec." - }, - { - "code": "def main():\n import time\n print('Testing controller in position 1:')\n print('Running 3 x 3 seconds tests')\n con = rController(1)\n for i in range(3):\n print('Waiting...')\n time.sleep(2.5)\n print('State: ', con.gamepad)\n print('Buttons: ', con.buttons)\n time.sleep(0.5)\n print('Done!')", - "docstring": "Test the functionality of the rController object" - }, - { - "code": "def fill_circle(setter, x0, y0, r, color=None):\n _draw_fast_vline(setter, x0, y0 - r, 2 * r + 1, color)\n _fill_circle_helper(setter, x0, y0, r, 3, 0, color)", - "docstring": "Draws a filled circle at point x0,y0 with radius r and specified color" - }, - { - "code": "def log_state(entity, state):\n p = {'on': entity, 'state': state}\n _log(TYPE_CODES.STATE, p)", - "docstring": "Logs a new state of an entity" - }, - { - "code": "def full_analysis(self):\n self.count()\n self.verify_words()\n self.verify_user()\n if self.review_requested == 'yes':\n self.label_suspicious('Review requested')", - "docstring": "Execute the count and verify_words methods." - }, - { - "code": "def write_json(path, params):\n logger.debug(\"write %s to %s\", params, path)\n if path.startswith(\"s3://\"):\n bucket = get_boto3_bucket(path.split(\"/\")[2])\n key = \"/\".join(path.split(\"/\")[3:])\n logger.debug(\"upload %s\", key)\n bucket.put_object(\n Key=key,\n Body=json.dumps(params, sort_keys=True, indent=4)\n )\n else:\n makedirs(os.path.dirname(path))\n with open(path, 'w') as dst:\n json.dump(params, dst, sort_keys=True, indent=4)", - "docstring": "Write local or remote." 
- }, - { - "code": "def cleanup_directory(config_data):\n if os.path.exists(config_data.project_directory):\n choice = False\n if config_data.noinput is False and not config_data.verbose:\n choice = query_yes_no(\n 'The installation failed.\\n'\n 'Do you want to clean up by removing {0}?\\n'\n '\\tWarning: this will delete all files in:\\n'\n '\\t\\t{0}\\n'\n 'Do you want to cleanup?'.format(\n os.path.abspath(config_data.project_directory)\n ),\n 'no'\n )\n else:\n sys.stdout.write('The installation has failed.\\n')\n if config_data.skip_project_dir_check is False and (choice or\n (config_data.noinput and\n config_data.delete_project_dir)):\n sys.stdout.write('Removing everything under {0}\\n'.format(\n os.path.abspath(config_data.project_directory)\n ))\n shutil.rmtree(config_data.project_directory, True)", - "docstring": "Asks user for removal of project directory and eventually removes it" - }, - { - "code": "def apply(f, obj, *args, **kwargs):\n return vectorize(f)(obj, *args, **kwargs)", - "docstring": "Apply a function in parallel to each element of the input" - }, - { - "code": "def cleanup(self):\n if self.subscription:\n logger.info(\"Deleting worker subscription...\")\n self.subscriber_client.delete_subscription(self.subscription)", - "docstring": "Deletes this worker's subscription." - }, - { - "code": "def parse_groups(output):\n groups = []\n for line in output.split('\\n'):\n m = NDXGROUP.match(line)\n if m:\n d = m.groupdict()\n groups.append({'name': d['GROUPNAME'],\n 'nr': int(d['GROUPNUMBER']),\n 'natoms': int(d['NATOMS'])})\n return groups", - "docstring": "Parse ``make_ndx`` output and return groups as a list of dicts." - }, - { - "code": "def get(self, id, **kwargs):\n return (super(MutableCollection, self).get((id,), **kwargs)\n .get(self.singular, None))", - "docstring": "Get single unit of collection" - }, - { - "code": "def build(ctx, project, build):\n ctx.obj = ctx.obj or {}\n ctx.obj['project'] = project\n ctx.obj['build'] = build", - "docstring": "Commands for build jobs." - }, - { - "code": "def _format(formatter, x):\n formatter.create_dummy_axis()\n formatter.set_locs([val for val in x if ~np.isnan(val)])\n try:\n oom = int(formatter.orderOfMagnitude)\n except AttributeError:\n oom = 0\n labels = [formatter(tick) for tick in x]\n pattern = re.compile(r'\\.0+$')\n for i, label in enumerate(labels):\n match = pattern.search(label)\n if match:\n labels[i] = pattern.sub('', label)\n if oom:\n labels = ['{}e{}'.format(s, oom) if s != '0' else s\n for s in labels]\n return labels", - "docstring": "Helper to format and tidy up" - }, - { - "code": "def render_to_string(self):\n values = ''\n for key, value in self.items():\n values += '{}={};'.format(key, value)\n return values", - "docstring": "Render to cookie strings." - }, - { - "code": "def _handle_module(args):\n module = _get_module_filename(args.module)\n if not module:\n _error(\"Could not load module or package: %r\", args.module)\n elif isinstance(module, Unparseable):\n _error(\"Could not determine module source: %r\", args.module)\n _parse_and_output(module, args)", - "docstring": "Handles the -m argument." 
- }, - { - "code": "def fillScreen(self, color=None):\n md.fill_rect(self.set, 0, 0, self.width, self.height, color)", - "docstring": "Fill the matrix with the given RGB color" - }, - { - "code": "def search_images(q, start=1, count=10, wait=10, asynchronous=False, cached=False):\n service = YAHOO_IMAGES\n return YahooSearch(q, start, count, service, None, wait, asynchronous, cached)", - "docstring": "Returns a Yahoo images query formatted as a YahooSearch list object." - }, - { - "code": "def _debug_off():\n if _os.path.exists(__debugflag__):\n _os.remove(__debugflag__)\n __loglevel__ = \"ERROR\"\n _LOGGER.info(\"debugging turned off\")\n _set_debug_dict(__loglevel__)", - "docstring": "turns off debugging by removing hidden tmp file" - }, - { - "code": "def SELFDESTRUCT(self, recipient):\n recipient = Operators.EXTRACT(recipient, 0, 160)\n address = self.address\n if issymbolic(recipient):\n logger.info(\"Symbolic recipient on self destruct\")\n recipient = solver.get_value(self.constraints, recipient)\n if recipient not in self.world:\n self.world.create_account(address=recipient)\n self.world.send_funds(address, recipient, self.world.get_balance(address))\n self.world.delete_account(address)\n raise EndTx('SELFDESTRUCT')", - "docstring": "Halt execution and register account for later deletion" - }, - { - "code": "def generateRecords(self, records):\n if self.verbosity>0: print 'Generating', len(records), 'records...'\n for record in records:\n self.generateRecord(record)", - "docstring": "Generate multiple records. Refer to definition for generateRecord" - }, - { - "code": "def _run_timeout_threads(self, handler):\n for dummy, method in inspect.getmembers(handler, callable):\n if not hasattr(method, \"_pyxmpp_timeout\"):\n continue\n thread = TimeoutThread(method, daemon = self.daemon,\n exc_queue = self.exc_queue)\n self.timeout_threads.append(thread)\n thread.start()", - "docstring": "Start threads for a TimeoutHandler." - }, - { - "code": "def getData(self, n):\n records = [self.getNext() for x in range(n)]\n return records", - "docstring": "Returns the next n values for the distribution as a list." - }, - { - "code": "def from_cookie_string(self, cookie_string):\n for key_value in cookie_string.split(';'):\n if '=' in key_value:\n key, value = key_value.split('=', 1)\n else:\n key = key_value\n strip_key = key.strip()\n if strip_key and strip_key.lower() not in COOKIE_ATTRIBUTE_NAMES:\n self[strip_key] = value.strip()", - "docstring": "update self with cookie_string." - }, - { - "code": "def status(self):\n status = []\n if self.provider:\n status = self.provider.status(self.blocks.values())\n return status", - "docstring": "Return status of all blocks." 
- }, - { - "code": "def draw(self, widget, cr):\n if self.bot_size is None:\n self.draw_default_image(cr)\n return\n cr = driver.ensure_pycairo_context(cr)\n surface = self.backing_store.surface\n cr.set_source_surface(surface)\n cr.paint()", - "docstring": "Draw just the exposed part of the backing store, scaled to fit" - }, - { - "code": "def find_funcdef(source):\n try:\n module_node = compile(\n source, \"\", mode=\"exec\", flags=ast.PyCF_ONLY_AST\n )\n except SyntaxError:\n return find_funcdef(fix_lamdaline(source))\n for node in ast.walk(module_node):\n if isinstance(node, ast.FunctionDef) or isinstance(node, ast.Lambda):\n return node\n raise ValueError(\"function definition not found\")", - "docstring": "Find the first FuncDef ast object in source" - }, - { - "code": "def create_deleted_record(self, record):\n identifier = record_get_field_value(record,\n tag=\"037\",\n code=\"a\")\n recid = identifier.split(\":\")[-1]\n try:\n source = identifier.split(\":\")[1]\n except IndexError:\n source = \"Unknown\"\n record_add_field(record, \"035\",\n subfields=[(\"9\", source), (\"a\", recid)])\n record_add_field(record, \"980\",\n subfields=[(\"c\", \"DELETED\")])\n return record", - "docstring": "Generate the record deletion if deleted form OAI-PMH." - }, - { - "code": "def _new_dynspace(\n self,\n name=None,\n bases=None,\n formula=None,\n refs=None,\n arguments=None,\n source=None,\n ):\n if name is None:\n name = self.spacenamer.get_next(self.namespace)\n if name in self.namespace:\n raise ValueError(\"Name '%s' already exists.\" % name)\n if not is_valid_name(name):\n raise ValueError(\"Invalid name '%s'.\" % name)\n space = RootDynamicSpaceImpl(\n parent=self,\n name=name,\n formula=formula,\n refs=refs,\n source=source,\n arguments=arguments,\n )\n space.is_derived = False\n self._set_space(space)\n if bases:\n dynbase = self._get_dynamic_base(bases)\n space._dynbase = dynbase\n dynbase._dynamic_subs.append(space)\n return space", - "docstring": "Create a new dynamic root space." 
- }, - { - "code": "def _make_user_class(session, name):\n attrs = session.eval('fieldnames(%s);' % name, nout=1).ravel().tolist()\n methods = session.eval('methods(%s);' % name, nout=1).ravel().tolist()\n ref = weakref.ref(session)\n doc = _DocDescriptor(ref, name)\n values = dict(__doc__=doc, _name=name, _ref=ref, _attrs=attrs,\n __module__='oct2py.dynamic')\n for method in methods:\n doc = _MethodDocDescriptor(ref, name, method)\n cls_name = '%s_%s' % (name, method)\n method_values = dict(__doc__=doc)\n method_cls = type(str(cls_name),\n (OctaveUserClassMethod,), method_values)\n values[method] = method_cls(ref, method, name)\n for attr in attrs:\n values[attr] = OctaveUserClassAttr(ref, attr, attr)\n return type(str(name), (OctaveUserClass,), values)", - "docstring": "Make an Octave class for a given class name" - }, - { - "code": "def make( data, samples ):\n invcffile = os.path.join( data.dirs.consens, data.name+\".vcf\" )\n outlocifile = os.path.join( data.dirs.outfiles, data.name+\".loci\" )\n importvcf( invcffile, outlocifile )", - "docstring": "Convert vcf from step6 to .loci format to facilitate downstream format conversion" - }, - { - "code": "def iso_reference_isvalid(ref):\n ref = str(ref)\n cs_source = ref[4:] + ref[:4]\n return (iso_reference_str2int(cs_source) % 97) == 1", - "docstring": "Validates ISO reference number" - }, - { - "code": "def _tempfile(filename):\n return tempfile.NamedTemporaryFile(mode='w',\n dir=os.path.dirname(filename),\n prefix=os.path.basename(filename),\n suffix=os.fsencode('.tmp'),\n delete=False)", - "docstring": "Create a NamedTemporaryFile instance to be passed to atomic_writer" - }, - { - "code": "def apply_rules(self, device_id, mac_address, rules):\n LOG.info(\"Applying security group rules for device %s with MAC %s\" %\n (device_id, mac_address))\n rule_dict = {SECURITY_GROUP_RULE_KEY: rules}\n redis_key = self.vif_key(device_id, mac_address)\n self.set_field(redis_key, SECURITY_GROUP_HASH_ATTR, rule_dict)\n self.set_field_raw(redis_key, SECURITY_GROUP_ACK, False)", - "docstring": "Writes a series of security group rules to a redis server." - }, - { - "code": "def find_in_app(self, app, path):\n storage = self.storages.get(app, None)\n if storage:\n if storage.exists(path):\n matched_path = storage.path(path)\n if matched_path:\n return matched_path", - "docstring": "Find a requested media file in an app's media fixtures locations." 
- }, - { - "code": "def md_jdbc_virtual_table(key, node):\n name = node.find(\"name\")\n sql = node.find(\"sql\")\n escapeSql = node.find(\"escapeSql\")\n escapeSql = escapeSql.text if escapeSql is not None else None\n keyColumn = node.find(\"keyColumn\")\n keyColumn = keyColumn.text if keyColumn is not None else None\n n_g = node.find(\"geometry\")\n geometry = JDBCVirtualTableGeometry(n_g.find(\"name\"), n_g.find(\"type\"), n_g.find(\"srid\"))\n parameters = []\n for n_p in node.findall(\"parameter\"):\n p_name = n_p.find(\"name\")\n p_defaultValue = n_p.find(\"defaultValue\")\n p_defaultValue = p_defaultValue.text if p_defaultValue is not None else None\n p_regexpValidator = n_p.find(\"regexpValidator\")\n p_regexpValidator = p_regexpValidator.text if p_regexpValidator is not None else None\n parameters.append(JDBCVirtualTableParam(p_name, p_defaultValue, p_regexpValidator))\n return JDBCVirtualTable(name, sql, escapeSql, geometry, keyColumn, parameters)", - "docstring": "Extract metadata JDBC Virtual Tables from an xml node" - }, - { - "code": "def handle_endtag(self, tag):\n if tag in self.mathml_elements:\n self.fed.append(\"\".format(tag))", - "docstring": "Return representation of html end tag." - }, - { - "code": "def utility(self, state, player):\n \"Return the value to player; 1 for win, -1 for loss, 0 otherwise.\"\n return if_(player == 'X', state.utility, -state.utility)", - "docstring": "Return the value to player; 1 for win, -1 for loss, 0 otherwise." - }, - { - "code": "def _map_timezones():\n tz_map = {}\n todo = HAYSTACK_TIMEZONES_SET.copy()\n for full_tz in pytz.all_timezones:\n if not bool(todo):\n break\n if full_tz in todo:\n tz_map[full_tz] = full_tz\n todo.discard(full_tz)\n continue\n if '/' not in full_tz:\n continue\n (prefix, suffix) = full_tz.split('/',1)\n if '/' in suffix:\n continue\n if suffix in todo:\n tz_map[suffix] = full_tz\n todo.discard(suffix)\n continue\n return tz_map", - "docstring": "Map the official Haystack timezone list to those recognised by pytz." - }, - { - "code": "def read_file(self, filename, destination=''):\n if not destination:\n destination = filename\n log.info('Transferring %s to %s', filename, destination)\n data = self.download_file(filename)\n log.info(destination)\n if not os.path.exists(os.path.dirname(destination)):\n try:\n os.makedirs(os.path.dirname(destination))\n except OSError as e:\n if e.errno != errno.EEXIST:\n raise\n with open(destination, 'w') as fil:\n fil.write(data)", - "docstring": "reading data from device into local file" - }, - { - "code": "def check_docstring(cls):\n docstring = inspect.getdoc(cls)\n if not docstring:\n breadcrumbs = \" -> \".join(t.__name__ for t in inspect.getmro(cls)[:-1][::-1])\n msg = \"docstring required for plugin '%s' (%s, defined in %s)\"\n args = (cls.__name__, breadcrumbs, cls.__module__)\n raise InternalCashewException(msg % args)\n max_line_length = cls._class_settings.get('max-docstring-length')\n if max_line_length:\n for i, line in enumerate(docstring.splitlines()):\n if len(line) > max_line_length:\n msg = \"docstring line %s of %s is %s chars too long\" \n args = (i, cls.__name__, len(line) - max_line_length)\n raise Exception(msg % args)\n return docstring", - "docstring": "Asserts that the class has a docstring, returning it if successful." 
- }, - { - "code": "def _get_param(self, param, allowed_values=None, optional=False):\n request_params = self._request_params()\n if param in request_params:\n value = request_params[param].lower()\n if allowed_values is not None:\n if value in allowed_values:\n self.params[param] = value\n else:\n raise OWSInvalidParameterValue(\"%s %s is not supported\" % (param, value), value=param)\n elif optional:\n self.params[param] = None\n else:\n raise OWSMissingParameterValue('Parameter \"%s\" is missing' % param, value=param)\n return self.params[param]", - "docstring": "Get parameter in GET request." - }, - { - "code": "def rst2node(doc_name, data):\n if not data:\n return\n parser = docutils.parsers.rst.Parser()\n document = docutils.utils.new_document('<%s>' % doc_name)\n document.settings = docutils.frontend.OptionParser().get_default_values()\n document.settings.tab_width = 4\n document.settings.pep_references = False\n document.settings.rfc_references = False\n document.settings.env = Env()\n parser.parse(data, document)\n if len(document.children) == 1:\n return document.children[0]\n else:\n par = docutils.nodes.paragraph()\n for child in document.children:\n par += child\n return par", - "docstring": "Converts a reStructuredText into its node" - }, - { - "code": "def login(self, username, password=None, token=None):\n self.session.basic_auth(username, password)", - "docstring": "Login user for protected API calls." - }, - { - "code": "def IsTopLevel(self) -> bool:\n handle = self.NativeWindowHandle\n if handle:\n return GetAncestor(handle, GAFlag.Root) == handle\n return False", - "docstring": "Determine whether current control is top level." - }, - { - "code": "def signal_to_noise_map(self):\n signal_to_noise_map = np.divide(self.image, self.noise_map)\n signal_to_noise_map[signal_to_noise_map < 0] = 0\n return signal_to_noise_map", - "docstring": "The estimated signal-to-noise_maps mappers of the image." - }, - { - "code": "def edit(self):\n input_params = {\n \"name\": self.name,\n \"public_key\": self.public_key,\n }\n data = self.get_data(\n \"account/keys/%s\" % self.id,\n type=PUT,\n params=input_params\n )\n if data:\n self.id = data['ssh_key']['id']", - "docstring": "Edit the SSH Key" - }, - { - "code": "def interactive(proto_dataset_uri):\n proto_dataset = dtoolcore.ProtoDataSet.from_uri(\n uri=proto_dataset_uri,\n config_path=CONFIG_PATH)\n readme_template = _get_readme_template()\n yaml = YAML()\n yaml.explicit_start = True\n yaml.indent(mapping=2, sequence=4, offset=2)\n descriptive_metadata = yaml.load(readme_template)\n descriptive_metadata = _prompt_for_values(descriptive_metadata)\n stream = StringIO()\n yaml.dump(descriptive_metadata, stream)\n proto_dataset.put_readme(stream.getvalue())\n click.secho(\"Updated readme \", fg=\"green\")\n click.secho(\"To edit the readme using your default editor:\")\n click.secho(\n \"dtool readme edit {}\".format(proto_dataset_uri),\n fg=\"cyan\")", - "docstring": "Interactive prompting to populate the readme." 
- }, - { - "code": "def execute_tuple(self, stream_id, source_component, latency_in_ns):\n self.update_count(self.EXEC_COUNT, key=stream_id)\n self.update_reduced_metric(self.EXEC_LATENCY, latency_in_ns, stream_id)\n self.update_count(self.EXEC_TIME_NS, incr_by=latency_in_ns, key=stream_id)\n global_stream_id = source_component + \"/\" + stream_id\n self.update_count(self.EXEC_COUNT, key=global_stream_id)\n self.update_reduced_metric(self.EXEC_LATENCY, latency_in_ns, global_stream_id)\n self.update_count(self.EXEC_TIME_NS, incr_by=latency_in_ns, key=global_stream_id)", - "docstring": "Apply updates to the execute metrics" - }, - { - "code": "def main(argv=None):\n if argv is None:\n argv = sys.argv[1:]\n cli = CommandLineTool()\n return cli.run(argv)", - "docstring": "Main command line interface." - }, - { - "code": "def create(self, list_id, data):\n return self._mc_client._post(url=self._build_path(list_id, 'segments'), data=data)", - "docstring": "adds a new segment to the list." - }, - { - "code": "def render(self, name, value, attrs={}):\n if value is None:\n value = ''\n final_attrs = self.build_attrs(attrs, name=name)\n quill_app = apps.get_app_config('quill')\n quill_config = getattr(quill_app, self.config)\n return mark_safe(render_to_string(quill_config['template'], {\n 'final_attrs': flatatt(final_attrs),\n 'value': value,\n 'id': final_attrs['id'],\n 'config': self.config,\n }))", - "docstring": "Render the Quill WYSIWYG." - }, - { - "code": "def parallel_concat_lcdir(lcbasedir,\n objectidlist,\n aperture='TF1',\n postfix='.gz',\n sortby='rjd',\n normalize=True,\n outdir=None,\n recursive=True,\n nworkers=32,\n maxworkertasks=1000):\n if not outdir:\n outdir = 'pklcs'\n if not os.path.exists(outdir):\n os.mkdir(outdir)\n tasks = [(lcbasedir, x, {'aperture':aperture,\n 'postfix':postfix,\n 'sortby':sortby,\n 'normalize':normalize,\n 'outdir':outdir,\n 'recursive':recursive}) for x in objectidlist]\n pool = mp.Pool(nworkers, maxtasksperchild=maxworkertasks)\n results = pool.map(parallel_concat_worker, tasks)\n pool.close()\n pool.join()\n return {x:y for (x,y) in zip(objectidlist, results)}", - "docstring": "This concatenates all text LCs for the given objectidlist." - }, - { - "code": "def collapse_nodes_with_same_names(graph: BELGraph) -> None:\n survivor_mapping = defaultdict(set)\n victims = set()\n it = tqdm(itt.combinations(graph, r=2), total=graph.number_of_nodes() * (graph.number_of_nodes() - 1) / 2)\n for a, b in it:\n if b in victims:\n continue\n a_name, b_name = a.get(NAME), b.get(NAME)\n if not a_name or not b_name or a_name.lower() != b_name.lower():\n continue\n if a.keys() != b.keys():\n continue\n for k in set(a.keys()) - {NAME, NAMESPACE}:\n if a[k] != b[k]:\n continue\n survivor_mapping[a].add(b)\n victims.add(b)\n collapse_nodes(graph, survivor_mapping)", - "docstring": "Collapse all nodes with the same name, merging namespaces by picking first alphabetical one." 
- }, - { - "code": "def setup(executor):\n def signal_handler(signal_to_handle, frame):\n Log.info('signal_handler invoked with signal %s', signal_to_handle)\n executor.stop_state_manager_watches()\n sys.exit(signal_to_handle)\n def cleanup():\n Log.info('Executor terminated; exiting all process in executor.')\n for pid in executor.processes_to_monitor.keys():\n os.kill(pid, signal.SIGTERM)\n time.sleep(5)\n os.killpg(0, signal.SIGTERM)\n shardid = executor.shard\n log.configure(logfile='heron-executor-%s.stdout' % shardid)\n pid = os.getpid()\n sid = os.getsid(pid)\n if pid <> sid:\n Log.info('Set up process group; executor becomes leader')\n os.setpgrp()\n Log.info('Register the SIGTERM signal handler')\n signal.signal(signal.SIGTERM, signal_handler)\n Log.info('Register the atexit clean up')\n atexit.register(cleanup)", - "docstring": "Set up log, process and signal handlers" - }, - { - "code": "def upstream_url(self, uri):\n \"Returns the URL to the upstream data source for the given URI based on configuration\"\n return self.application.options.upstream + self.request.uri", - "docstring": "Returns the URL to the upstream data source for the given URI based on configuration" - }, - { - "code": "async def set_group_link_sharing_enabled(\n self, set_group_link_sharing_enabled_request\n ):\n response = hangouts_pb2.SetGroupLinkSharingEnabledResponse()\n await self._pb_request('conversations/setgrouplinksharingenabled',\n set_group_link_sharing_enabled_request,\n response)\n return response", - "docstring": "Set whether group link sharing is enabled for a conversation." - }, - { - "code": "def __get_live_version(self):\n try:\n import versiontools\n except ImportError:\n return None\n else:\n return str(versiontools.Version.from_expression(self.name))", - "docstring": "Get a live version string using versiontools" - }, - { - "code": "def _weight(self, rsq, sigma=None):\n sigma = sigma or self.filter_size\n if not self.clip:\n o = np.exp(-rsq / (2*sigma**2))\n else:\n o = np.zeros(rsq.shape, dtype='float')\n m = (rsq < self.clipsize**2)\n o[m] = np.exp(-rsq[m] / (2*sigma**2))\n return o", - "docstring": "weighting function for Barnes" - }, - { - "code": "def compute_utility(self, board, move, player):\n \"If X wins with this move, return 1; if O return -1; else return 0.\"\n if (self.k_in_row(board, move, player, (0, 1)) or\n self.k_in_row(board, move, player, (1, 0)) or\n self.k_in_row(board, move, player, (1, -1)) or\n self.k_in_row(board, move, player, (1, 1))):\n return if_(player == 'X', +1, -1)\n else:\n return 0", - "docstring": "If X wins with this move, return 1; if O return -1; else return 0." - }, - { - "code": "def legal_inn():\n mask = [2, 4, 10, 3, 5, 9, 4, 6, 8]\n inn = [random.randint(1, 9) for _ in range(10)]\n weighted = [v * mask[i] for i, v in enumerate(inn[:-1])]\n inn[9] = sum(weighted) % 11 % 10\n return \"\".join(map(str, inn))", - "docstring": "Return a random taxation ID number for a company." 
- }, - { - "code": "def _loadConfig(self):\n config_dicts = [self.additional_config, self.app_config] + [t.getConfig() for t in self.hierarchy]\n config_blame = [\n _mirrorStructure(self.additional_config, 'command-line config'),\n _mirrorStructure(self.app_config, 'application\\'s config.json'),\n ] + [\n _mirrorStructure(t.getConfig(), t.getName()) for t in self.hierarchy\n ]\n self.config = _mergeDictionaries(*config_dicts)\n self.config_blame = _mergeDictionaries(*config_blame)", - "docstring": "load the configuration information from the target hierarchy" - }, - { - "code": "def _decode(self, data, algorithm, key=None):\n if algorithm['type'] == 'hmac':\n verify_signature = data[-algorithm['hash_size']:]\n data = data[:-algorithm['hash_size']]\n signature = self._hmac_generate(data, algorithm, key)\n if not const_equal(verify_signature, signature):\n raise Exception('Invalid signature')\n return data\n elif algorithm['type'] == 'aes':\n return self._aes_decrypt(data, algorithm, key)\n elif algorithm['type'] == 'no-serialization':\n return data\n elif algorithm['type'] == 'json':\n return json.loads(data)\n elif algorithm['type'] == 'no-compression':\n return data\n elif algorithm['type'] == 'gzip':\n return self._zlib_decompress(data, algorithm)\n else:\n raise Exception('Algorithm not supported: %s' % algorithm['type'])", - "docstring": "Decode data with specific algorithm" - }, - { - "code": "def createDataOutLink(network, sensorRegionName, regionName):\n network.link(sensorRegionName, regionName, \"UniformLink\", \"\",\n srcOutput=\"dataOut\", destInput=\"bottomUpIn\")", - "docstring": "Link sensor region to other region so that it can pass it data." - }, - { - "code": "def _build_admin_context(request, customer):\n opts = customer._meta\n codename = get_permission_codename('change', opts)\n has_change_permission = request.user.has_perm('%s.%s' % (opts.app_label, codename))\n return {\n 'has_change_permission': has_change_permission,\n 'opts': opts\n }", - "docstring": "Build common admin context." - }, - { - "code": "def _add_default_tz_bindings(self, context, switch, network_id):\n default_tz = CONF.NVP.default_tz\n if not default_tz:\n LOG.warn(\"additional_default_tz_types specified, \"\n \"but no default_tz. Skipping \"\n \"_add_default_tz_bindings().\")\n return\n if not network_id:\n LOG.warn(\"neutron network_id not specified, skipping \"\n \"_add_default_tz_bindings()\")\n return\n for net_type in CONF.NVP.additional_default_tz_types:\n if net_type in TZ_BINDINGS:\n binding = TZ_BINDINGS[net_type]\n binding.add(context, switch, default_tz, network_id)\n else:\n LOG.warn(\"Unknown default tz type %s\" % (net_type))", - "docstring": "Configure any additional default transport zone bindings." - }, - { - "code": "def col_values(df, col_name):\n _check_cols(df, [col_name])\n if 'O' in df[col_name] or pd.np.issubdtype(df[col_name].dtype, str):\n return [nom.lower() for nom in df[pd.notnull(df)][col_name] if not pd.isnull(nom)]\n else:\n return [nom for nom in df[pd.notnull(df)][col_name] if not pd.isnull(nom)]", - "docstring": "Return a list of not null values from the `col_name` column of `df`." - }, - { - "code": "def swatch(self, x, y, w=35, h=35, roundness=0):\n _ctx.fill(self)\n _ctx.rect(x, y, w, h, roundness)", - "docstring": "Rectangle swatch for this color." 
- }, - { - "code": "def send_payload(self, params):\n data = json.dumps({\n 'jsonrpc': self.version,\n 'method': self.service_name,\n 'params': params,\n 'id': text_type(uuid.uuid4())\n })\n data_binary = data.encode('utf-8')\n url_request = Request(self.service_url, data_binary, headers=self.headers)\n return urlopen(url_request).read()", - "docstring": "Performs the actual sending action and returns the result" - }, - { - "code": "def trun_to_file(trun, fpath=None):\n if fpath is None:\n fpath = yml_fpath(trun[\"conf\"][\"OUTPUT\"])\n with open(fpath, 'w') as yml_file:\n data = yaml.dump(trun, explicit_start=True, default_flow_style=False)\n yml_file.write(data)", - "docstring": "Dump the given trun to file" - }, - { - "code": "def write_json_response(self, response):\n self.write(tornado.escape.json_encode(response))\n self.set_header(\"Content-Type\", \"application/json\")", - "docstring": "write back json response" - }, - { - "code": "def UnPlug(self, force=False):\n if force:\n _xinput.UnPlugForce(c_uint(self.id))\n else:\n _xinput.UnPlug(c_uint(self.id))\n while self.id not in self.available_ids():\n if self.id == 0:\n break", - "docstring": "Unplug controller from Virtual USB Bus and free up ID" - }, - { - "code": "def _sub(self, other):\n if isinstance(other, self.__class__):\n sub = self._ip_dec - other._ip_dec\n if isinstance(other, int):\n sub = self._ip_dec - other\n else:\n other = self.__class__(other)\n sub = self._ip_dec - other._ip_dec\n return sub", - "docstring": "Subtract two IP addresses." - }, - { - "code": "def print_params(self, details=True, session=None):\n for i, p in enumerate(self.all_params):\n if details:\n try:\n val = p.eval(session=session)\n logging.info(\n \" param {:3}: {:20} {:15} {} (mean: {:<18}, median: {:<18}, std: {:<18}) \".\n format(i, p.name, str(val.shape), p.dtype.name, val.mean(), np.median(val), val.std())\n )\n except Exception as e:\n logging.info(str(e))\n raise Exception(\n \"Hint: print params details after tl.layers.initialize_global_variables(sess) \"\n \"or use network.print_params(False).\"\n )\n else:\n logging.info(\" param {:3}: {:20} {:15} {}\".format(i, p.name, str(p.get_shape()), p.dtype.name))\n logging.info(\" num of params: %d\" % self.count_params())", - "docstring": "Print all info of parameters in the network" - }, - { - "code": "def get(self, request, *args, **kwargs):\n form_class = self.get_form_class()\n form = self.get_form(form_class)\n inlines = self.construct_inlines()\n return self.render_to_response(self.get_context_data(form=form, inlines=inlines, **kwargs))", - "docstring": "Handles GET requests and instantiates a blank version of the form and formsets." 
- }, - { - "code": "def MSTORE(self, address, value):\n if istainted(self.pc):\n for taint in get_taints(self.pc):\n value = taint_with(value, taint)\n self._allocate(address, 32)\n self._store(address, value, 32)", - "docstring": "Save word to memory" - }, - { - "code": "def serve(destination, port, config):\n if os.path.exists(destination):\n pass\n elif os.path.exists(config):\n settings = read_settings(config)\n destination = settings.get('destination')\n if not os.path.exists(destination):\n sys.stderr.write(\"The '{}' directory doesn't exist, maybe try \"\n \"building first?\\n\".format(destination))\n sys.exit(1)\n else:\n sys.stderr.write(\"The {destination} directory doesn't exist \"\n \"and the config file ({config}) could not be read.\\n\"\n .format(destination=destination, config=config))\n sys.exit(2)\n print('DESTINATION : {}'.format(destination))\n os.chdir(destination)\n Handler = server.SimpleHTTPRequestHandler\n httpd = socketserver.TCPServer((\"\", port), Handler, False)\n print(\" * Running on http://127.0.0.1:{}/\".format(port))\n try:\n httpd.allow_reuse_address = True\n httpd.server_bind()\n httpd.server_activate()\n httpd.serve_forever()\n except KeyboardInterrupt:\n print('\\nAll done!')", - "docstring": "Run a simple web server." - }, - { - "code": "def actions(self, state):\n \"In the leftmost empty column, try all non-conflicting rows.\"\n if state[-1] is not None:\n return []\n else:\n col = state.index(None)\n return [row for row in range(self.N)\n if not self.conflicted(state, row, col)]", - "docstring": "In the leftmost empty column, try all non-conflicting rows." - }, - { - "code": "def add_arguments(parser):\n parser.add_argument('-e', '--environment', help='Environment name', required=False, nargs='+')\n parser.add_argument('-w', '--dont-wait', help='Skip waiting for the app to be deleted', action='store_true')", - "docstring": "Args for the init command" - }, - { - "code": "def _add_uninstall(self, context):\n contents = self._render_template('uninstall.sh', context)\n self.config.setdefault('files', [])\n self._add_unique_file({\n \"path\": \"/uninstall.sh\",\n \"contents\": contents,\n \"mode\": \"755\"\n })", - "docstring": "generates uninstall.sh and adds it to included files" - }, - { - "code": "def condense_ranges(cls, ranges):\n result = []\n if ranges:\n ranges.sort(key=lambda tup: tup[0])\n result.append(ranges[0])\n for i in range(1, len(ranges)):\n if result[-1][1] + 1 >= ranges[i][0]:\n result[-1] = (result[-1][0], max(result[-1][1], ranges[i][1]))\n else:\n result.append(ranges[i])\n return result", - "docstring": "Sorts and removes overlaps" - }, - { - "code": "def _keep_alive_thread(self):\n while True:\n with self._lock:\n if self.connected():\n self._ws.ping()\n else:\n self.disconnect()\n self._thread = None\n return\n sleep(30)", - "docstring": "Used exclusively as a thread which keeps the WebSocket alive." - }, - { - "code": "def _combineargs(self, *args, **kwargs):\n d = {arg: True for arg in args}\n d.update(kwargs)\n return d", - "docstring": "Add switches as 'options' with value True to the options dict." - }, - { - "code": "def FindMethodByName(self, name):\n for method in self.methods:\n if name == method.name:\n return method\n return None", - "docstring": "Searches for the specified method, and returns its descriptor." 
- }, - { - "code": "def expandpath(path):\n return os.path.abspath(os.path.expandvars(os.path.expanduser(path)))", - "docstring": "Returns an absolute expanded path" - }, - { - "code": "def clear_descendants(self, source, clear_source=True):\n removed = self.cellgraph.clear_descendants(source, clear_source)\n for node in removed:\n del node[OBJ].data[node[KEY]]", - "docstring": "Clear values and nodes calculated from `source`." - }, - { - "code": "def fromdict(cls, config, check_fields=True):\n m = super(Config, cls).__new__(cls)\n m.path = '.'\n m.verbose = False\n m.config = m._merge_defaults(config)\n if check_fields:\n m._check_fields()\n return m", - "docstring": "Create a Config object from config dict directly." - }, - { - "code": "def from_privkey(cls, privkey, prefix=None):\n privkey = PrivateKey(privkey, prefix=prefix or Prefix.prefix)\n secret = unhexlify(repr(privkey))\n order = ecdsa.SigningKey.from_string(\n secret, curve=ecdsa.SECP256k1\n ).curve.generator.order()\n p = ecdsa.SigningKey.from_string(\n secret, curve=ecdsa.SECP256k1\n ).verifying_key.pubkey.point\n x_str = ecdsa.util.number_to_string(p.x(), order)\n compressed = hexlify(chr(2 + (p.y() & 1)).encode(\"ascii\") + x_str).decode(\n \"ascii\"\n )\n return cls(compressed, prefix=prefix or Prefix.prefix)", - "docstring": "Derive uncompressed public key" - }, - { - "code": "def print_cm(cm, labels, hide_zeroes=False, hide_diagonal=False, hide_threshold=None):\n columnwidth = max([len(x) for x in labels] + [5])\n empty_cell = \" \" * columnwidth\n print(\" \" + empty_cell, end=\" \")\n for label in labels:\n print(\"%{0}s\".format(columnwidth) % label, end=\" \")\n print()\n for i, label1 in enumerate(labels):\n print(\" %{0}s\".format(columnwidth) % label1, end=\" \")\n for j in range(len(labels)):\n cell = \"%{0}.1f\".format(columnwidth) % cm[i, j]\n if hide_zeroes:\n cell = cell if float(cm[i, j]) != 0 else empty_cell\n if hide_diagonal:\n cell = cell if i != j else empty_cell\n if hide_threshold:\n cell = cell if cm[i, j] > hide_threshold else empty_cell\n print(cell, end=\" \")\n print()", - "docstring": "pretty print for confusion matrixes" - }, - { - "code": "def on_packet(packet):\n print(\"Framenumber: {}\".format(packet.framenumber))\n header, markers = packet.get_3d_markers()\n print(\"Component info: {}\".format(header))\n for marker in markers:\n print(\"\\t\", marker)", - "docstring": "Callback function that is called everytime a data packet arrives from QTM" - }, - { - "code": "def merge_with_published(self):\n pid, first = self.fetch_published()\n lca = first.revisions[self['_deposit']['pid']['revision_id']]\n args = [lca.dumps(), first.dumps(), self.dumps()]\n for arg in args:\n del arg['$schema'], arg['_deposit']\n args.append({})\n m = Merger(*args)\n try:\n m.run()\n except UnresolvedConflictsException:\n raise MergeConflict()\n return patch(m.unified_patches, lca)", - "docstring": "Merge changes with latest published version." 
- }, - { - "code": "def handle(self):\n while True:\n try:\n line = self.rfile.readline()\n try:\n cmd = json.loads(line)\n except Exception, exc:\n self.wfile.write(repr(exc) + NEWLINE)\n continue\n else:\n handler = getattr(self, 'handle_' + cmd[0], None)\n if not handler:\n self.wfile.write(\n repr(CommandNotFound(cmd[0])) + NEWLINE)\n continue\n return_value = handler(*cmd[1:])\n if not return_value:\n continue\n self.wfile.write(\n one_lineify(json.dumps(return_value)) + NEWLINE)\n except Exception, exc:\n self.wfile.write(repr(exc) + NEWLINE)\n continue", - "docstring": "The main function called to handle a request." - }, - { - "code": "def up(self):\n i = self.index()\n if i != None:\n del self.canvas.layers[i]\n i = min(len(self.canvas.layers), i+1)\n self.canvas.layers.insert(i, self)", - "docstring": "Moves the layer up in the stacking order." - }, - { - "code": "def asyncClose(fn):\n @functools.wraps(fn)\n def wrapper(*args, **kwargs):\n f = asyncio.ensure_future(fn(*args, **kwargs))\n while not f.done():\n QApplication.instance().processEvents()\n return wrapper", - "docstring": "Allow to run async code before application is closed." - }, - { - "code": "def offset(self, node):\n x = self.x + node.x - _ctx.WIDTH/2\n y = self.y + node.y - _ctx.HEIGHT/2\n return x, y", - "docstring": "Returns the distance from the center to the given node." - }, - { - "code": "def _getnodenamefor(self, name):\n \"Return the node name where the ``name`` would land to\"\n return 'node_' + str(\n (abs(binascii.crc32(b(name)) & 0xffffffff) % self.no_servers) + 1)", - "docstring": "Return the node name where the ``name`` would land to" - }, - { - "code": "def configure(self):\n lm = self.last_manifest\n for tracker in self.get_trackers():\n self.vprint('Checking tracker:', tracker)\n last_thumbprint = lm['_tracker_%s' % tracker.get_natural_key_hash()]\n self.vprint('last thumbprint:', last_thumbprint)\n has_changed = tracker.is_changed(last_thumbprint)\n self.vprint('Tracker changed:', has_changed)\n if has_changed:\n self.vprint('Change detected!')\n tracker.act()", - "docstring": "The standard method called to apply functionality when the manifest changes." - }, - { - "code": "def maint_up(self):\n r = self.local_renderer\n fn = self.render_to_file(r.env.maintenance_template, extra={'current_hostname': self.current_hostname})\n r.put(local_path=fn, remote_path=r.env.maintenance_path, use_sudo=True)\n r.sudo('chown -R {apache_web_user}:{apache_web_group} {maintenance_path}')", - "docstring": "Forwards all traffic to a page saying the server is down for maintenance." - }, - { - "code": "def course_or_program_exist(self, course_id, program_uuid):\n course_exists = course_id and CourseApiClient().get_course_details(course_id)\n program_exists = program_uuid and CourseCatalogApiServiceClient().program_exists(program_uuid)\n return course_exists or program_exists", - "docstring": "Return whether the input course or program exist." - }, - { - "code": "def shutdown(self, msg, args):\n self.log.info(\"Received shutdown from %s\", msg.user.username)\n self._bot.runnable = False\n return \"Shutting down...\"", - "docstring": "Causes the bot to gracefully shutdown." 
- }, - { - "code": "def remove_images():\n client = get_docker_client()\n removed = _remove_dangling_images()\n dusty_images = get_dusty_images()\n all_images = client.images(all=True)\n for image in all_images:\n if set(image['RepoTags']).intersection(dusty_images):\n try:\n client.remove_image(image['Id'])\n except Exception as e:\n logging.info(\"Couldn't remove image {}\".format(image['RepoTags']))\n else:\n log_to_client(\"Removed Image {}\".format(image['RepoTags']))\n removed.append(image)\n return removed", - "docstring": "Removes all dangling images as well as all images referenced in a dusty spec; forceful removal is not used" - }, - { - "code": "def traverse(element, query, deep=False):\n part = query[0]\n if not part:\n query = query[1:]\n part = query[0]\n deep = True\n part, predicate = xpath_re.match(query[0]).groups()\n for c in element._children:\n if part in ('*', c.tagname) and c._match(predicate):\n if len(query) == 1:\n yield c\n else:\n for e in traverse(c, query[1:]):\n yield e\n if deep:\n for e in traverse(c, query, deep=True):\n yield e", - "docstring": "Helper function to traverse an element tree rooted at element, yielding nodes matching the query." - }, - { - "code": "def run(self):\n with LiveExecution.lock:\n if self.edited_source:\n success, ex = self.run_tenuous()\n if success:\n return\n self.do_exec(self.known_good, self.ns)", - "docstring": "Attempt to known good or tenuous source." - }, - { - "code": "def shell(cmd, check=True, stdin=None, stdout=None, stderr=None):\n return subprocess.run(cmd, shell=True, check=check, stdin=stdin, stdout=stdout, stderr=stderr)", - "docstring": "Runs a subprocess shell with check=True by default" - }, - { - "code": "def outer_right_join(self, join_streamlet, window_config, join_function):\n from heronpy.streamlet.impl.joinbolt import JoinStreamlet, JoinBolt\n join_streamlet_result = JoinStreamlet(JoinBolt.OUTER_RIGHT, window_config,\n join_function, self, join_streamlet)\n self._add_child(join_streamlet_result)\n join_streamlet._add_child(join_streamlet_result)\n return join_streamlet_result", - "docstring": "Return a new Streamlet by outer right join_streamlet with this streamlet" - }, - { - "code": "def exit(self):\r\n if self._engine:\r\n self._engine.repl.terminate()\r\n self._engine = None", - "docstring": "Quits this octave session and cleans up." - }, - { - "code": "def _base_repr(self, and_also=None):\n items = [\n \"=\".join((key, repr(getattr(self, key))))\n for key in sorted(self._fields.keys())]\n if items:\n output = \", \".join(items)\n else:\n output = None\n if and_also:\n return \"{}({}, {})\".format(self.__class__.__name__,\n output, and_also)\n else:\n return \"{}({})\".format(self.__class__.__name__, output)", - "docstring": "Common repr logic for subclasses to hook" - }, - { - "code": "def checkOut(thing,html=True):\n msg=\"\"\n for name in sorted(dir(thing)):\n if not \"__\" in name:\n msg+=\"%s\\n\"%name\n try:\n msg+=\" ^-VALUE: %s\\n\"%getattr(thing,name)()\n except:\n pass\n if html:\n html=''+msg+''\n html=html.replace(\" \",\" \").replace(\"\\n\",\"
\")\n fname = tempfile.gettempdir()+\"/swhlab/checkout.html\"\n with open(fname,'w') as f:\n f.write(html)\n webbrowser.open(fname)\n print(msg.replace('','').replace('',''))", - "docstring": "show everything we can about an object's projects and methods." - }, - { - "code": "def _CCompiler_spawn_silent(cmd, dry_run=None):\n proc = Popen(cmd, stdout=PIPE, stderr=PIPE)\n out, err = proc.communicate()\n if proc.returncode:\n raise DistutilsExecError(err)", - "docstring": "Spawn a process, and eat the stdio." - }, - { - "code": "def create_bodies(self):\n self.bodies = {}\n for label in self.channels:\n body = self.world.create_body(\n 'sphere', name='marker:{}'.format(label), radius=0.02)\n body.is_kinematic = True\n body.color = 0.9, 0.1, 0.1, 0.5\n self.bodies[label] = body", - "docstring": "Create physics bodies corresponding to each marker in our data." - }, - { - "code": "def setup_mnu_style(self, editor):\n menu = QtWidgets.QMenu('Styles', self.menuEdit)\n group = QtWidgets.QActionGroup(self)\n self.styles_group = group\n current_style = editor.syntax_highlighter.color_scheme.name\n group.triggered.connect(self.on_style_changed)\n for s in sorted(PYGMENTS_STYLES):\n a = QtWidgets.QAction(menu)\n a.setText(s)\n a.setCheckable(True)\n if s == current_style:\n a.setChecked(True)\n group.addAction(a)\n menu.addAction(a)\n self.menuEdit.addMenu(menu)", - "docstring": "setup the style menu for an editor tab" - }, - { - "code": "def add_config_files_to_archive(directory, filename, config={}):\n with zipfile.ZipFile(filename, 'a') as zip_file:\n for conf in config:\n for conf, tree in list(conf.items()):\n if 'yaml' in tree:\n content = yaml.dump(tree['yaml'], default_flow_style=False)\n else:\n content = tree.get('content', '')\n out(\"Adding file \" + str(conf) + \" to archive \" + str(filename))\n file_entry = zipfile.ZipInfo(conf)\n file_entry.external_attr = tree.get('permissions', 0o644) << 16 \n zip_file.writestr(file_entry, content)\n return filename", - "docstring": "Adds configuration files to an existing archive" - }, - { - "code": "def ensure_remote_branch_is_tracked(branch):\n if branch == MASTER_BRANCH:\n return\n output = subprocess.check_output(['git', 'branch', '--list'])\n for line in output.split('\\n'):\n if line.strip() == branch:\n break\n else:\n try:\n sys.stdout.write(subprocess.check_output(\n ['git', 'checkout', '--track', 'origin/%s' % branch]))\n except subprocess.CalledProcessError:\n raise SystemExit(1)", - "docstring": "Track the specified remote branch if it is not already tracked." - }, - { - "code": "def update(self, points, values=None):\n self.values = values or [None] * len(points)\n if np is None:\n if self.option.function:\n warnings.warn('numpy not available, function ignored')\n self.points = points\n self.minimum = min(self.points)\n self.maximum = max(self.points)\n self.current = self.points[-1]\n else:\n self.points = self.apply_function(points)\n self.minimum = np.min(self.points)\n self.maximum = np.max(self.points)\n self.current = self.points[-1]\n if self.maximum == self.minimum:\n self.extents = 1\n else:\n self.extents = (self.maximum - self.minimum)\n self.extents = (self.maximum - self.minimum)", - "docstring": "Add a set of data points." 
- }, - { - "code": "def main(graph: BELGraph, xlsx: str, tsvs: str):\n if not xlsx and not tsvs:\n click.secho('Specify at least one option --xlsx or --tsvs', fg='red')\n sys.exit(1)\n spia_matrices = bel_to_spia_matrices(graph)\n if xlsx:\n spia_matrices_to_excel(spia_matrices, xlsx)\n if tsvs:\n spia_matrices_to_tsvs(spia_matrices, tsvs)", - "docstring": "Export the graph to a SPIA Excel sheet." - }, - { - "code": "def count_relations(self) -> int:\n if self.edge_model is ...:\n raise Bio2BELMissingEdgeModelError('edge_edge model is undefined/count_bel_relations is not overridden')\n elif isinstance(self.edge_model, list):\n return sum(self._count_model(m) for m in self.edge_model)\n else:\n return self._count_model(self.edge_model)", - "docstring": "Count the number of BEL relations generated." - }, - { - "code": "def count_defaultdict(dict_of_lists: Mapping[X, List[Y]]) -> Mapping[X, typing.Counter[Y]]:\n return {\n k: Counter(v)\n for k, v in dict_of_lists.items()\n }", - "docstring": "Count the number of elements in each value of the dictionary." - }, - { - "code": "def label_suspicious(self, reason):\n self.suspicion_reasons.append(reason)\n self.is_suspect = True", - "docstring": "Add suspicion reason and set the suspicious flag." - }, - { - "code": "async def async_connect(self):\n if self._waiters is None:\n raise Exception('Error, database not properly initialized before async connection')\n if self._waiters or self.max_connections and (len(self._in_use) >= self.max_connections):\n waiter = asyncio.Future(loop=self._loop)\n self._waiters.append(waiter)\n try:\n logger.debug('Wait for connection.')\n await waiter\n finally:\n self._waiters.remove(waiter)\n self.connect()\n return self._state.conn", - "docstring": "Asyncronously wait for a connection from the pool." - }, - { - "code": "def register_saver_ops(self):\n variables = self.get_savable_variables()\n if variables is None or len(variables) == 0:\n self._saver = None\n return\n base_scope = self._get_base_variable_scope()\n variables_map = {strip_name_scope(v.name, base_scope): v for v in variables}\n self._saver = tf.train.Saver(\n var_list=variables_map,\n reshape=False,\n sharded=False,\n max_to_keep=5,\n keep_checkpoint_every_n_hours=10000.0,\n name=None,\n restore_sequentially=False,\n saver_def=None,\n builder=None,\n defer_build=False,\n allow_empty=True,\n write_version=tf.train.SaverDef.V2,\n pad_step_number=False,\n save_relative_paths=True\n )", - "docstring": "Registers the saver operations to the graph in context." - }, - { - "code": "def indexOf(a, b):\n \"Return the first index of b in a.\"\n for i, j in enumerate(a):\n if j == b:\n return i\n else:\n raise ValueError('sequence.index(x): x not in sequence')", - "docstring": "Return the first index of b in a." - }, - { - "code": "def parse_markdown():\n readme_file = f'{PACKAGE_ROOT}/README.md'\n if path.exists(readme_file):\n with open(readme_file, 'r', encoding='utf-8') as f:\n long_description = f.read()\n return long_description", - "docstring": "Parse markdown as description" - }, - { - "code": "def _transaction_end(self):\n self._command.append('\\x87')\n self._ft232h._write(''.join(self._command))\n return bytearray(self._ft232h._poll_read(self._expected))", - "docstring": "End I2C transaction and get response bytes, including ACKs." - }, - { - "code": "def utime(self, *args, **kwargs):\n os.utime(self.extended_path, *args, **kwargs)", - "docstring": "Set the access and modified times of the file specified by path." 
- }, - { - "code": "def _add_cycle_free(model, fluxes):\n model.objective = model.solver.interface.Objective(\n Zero, direction=\"min\", sloppy=True)\n objective_vars = []\n for rxn in model.reactions:\n flux = fluxes[rxn.id]\n if rxn.boundary:\n rxn.bounds = (flux, flux)\n continue\n if flux >= 0:\n rxn.bounds = max(0, rxn.lower_bound), max(flux, rxn.upper_bound)\n objective_vars.append(rxn.forward_variable)\n else:\n rxn.bounds = min(flux, rxn.lower_bound), min(0, rxn.upper_bound)\n objective_vars.append(rxn.reverse_variable)\n model.objective.set_linear_coefficients({v: 1.0 for v in objective_vars})", - "docstring": "Add constraints for CycleFreeFlux." - }, - { - "code": "def as_dict(self):\n self_as_dict = dict()\n self_as_dict['sequence'] = self.sequence\n if hasattr(self, 'frequency'):\n self_as_dict['frequency'] = self.frequency\n return self_as_dict", - "docstring": "Return Allele data as dict object." - }, - { - "code": "def _derive_y_from_x(self, x, is_even):\n curve = ecdsa.SECP256k1.curve\n a, b, p = curve.a(), curve.b(), curve.p()\n alpha = (pow(x, 3, p) + a * x + b) % p\n beta = ecdsa.numbertheory.square_root_mod_prime(alpha, p)\n if (beta % 2) == is_even:\n beta = p - beta\n return beta", - "docstring": "Derive y point from x point" - }, - { - "code": "def delete_granule(self, coverage, store, granule_id, workspace=None):\n params = dict()\n workspace_name = workspace\n if isinstance(store, basestring):\n store_name = store\n else:\n store_name = store.name\n workspace_name = store.workspace.name\n if workspace_name is None:\n raise ValueError(\"Must specify workspace\")\n url = build_url(\n self.service_url,\n [\n \"workspaces\",\n workspace_name,\n \"coveragestores\",\n store_name,\n \"coverages\",\n coverage,\n \"index/granules\",\n granule_id,\n \".json\"\n ],\n params\n )\n headers = {\n \"Content-type\": \"application/json\",\n \"Accept\": \"application/json\"\n }\n resp = self.http_request(url, method='delete', headers=headers)\n if resp.status_code != 200:\n FailedRequestError('Failed to delete granule from mosaic {} : {}, {}'.format(store, resp.status_code, resp.text))\n self._cache.clear()\n return None", - "docstring": "Deletes a granule of an existing imagemosaic" - }, - { - "code": "def go(self, state, direction):\n \"Return the state that results from going in this direction.\"\n state1 = vector_add(state, direction)\n return if_(state1 in self.states, state1, state)", - "docstring": "Return the state that results from going in this direction." - }, - { - "code": "def connect(self, host):\n if not self.app.connect(host):\n command = \"Connect({0})\".format(host).encode(\"ascii\")\n self.exec_command(command)\n self.last_host = host", - "docstring": "Connect to a host" - }, - { - "code": "def write_index_translation(translation_filename, entity_ids, relation_ids):\n translation = triple_pb.Translation()\n entities = []\n for name, index in entity_ids.items():\n translation.entities.add(element=name, index=index)\n relations = []\n for name, index in relation_ids.items():\n translation.relations.add(element=name, index=index)\n with open(translation_filename, \"wb\") as f:\n f.write(translation.SerializeToString())", - "docstring": "write triples into a translation file." 
- }, - { - "code": "def to_dataframe(\n self,\n columns=BindingPrediction.fields + (\"length\",)):\n return pd.DataFrame.from_records(\n [tuple([getattr(x, name) for name in columns]) for x in self],\n columns=columns)", - "docstring": "Converts collection of BindingPrediction objects to DataFrame" - }, - { - "code": "def nearest_int(x):\n if x == 0:\n return np.int64(0)\n elif x > 0:\n return np.int64(x + 0.5)\n else:\n return np.int64(x - 0.5)", - "docstring": "Return nearest long integer to x" - }, - { - "code": "def unregister(self, provider_class):\n if not issubclass(provider_class, BaseProvider):\n raise TypeError('%s must be a subclass of BaseProvider' % provider_class.__name__)\n if provider_class not in self._registered_providers:\n raise NotRegistered('%s is not registered' % provider_class.__name__)\n self._registered_providers.remove(provider_class)\n self.invalidate_providers()", - "docstring": "Unregisters a provider from the site." - }, - { - "code": "def refmap_init(data, sample, force):\n sample.files.unmapped_reads = os.path.join(data.dirs.edits, \n \"{}-refmap_derep.fastq\".format(sample.name))\n sample.files.mapped_reads = os.path.join(data.dirs.refmapping,\n \"{}-mapped-sorted.bam\".format(sample.name))", - "docstring": "create some file handles for refmapping" - }, - { - "code": "def send_command(self, command):\n with self._lock:\n try:\n self._socket.send(command.encode(\"utf8\"))\n result = self.receive()\n while result.startswith(\"S\") or result.startswith(\"NEW\"):\n _LOGGER.debug(\"!Got response: %s\", result)\n result = self.receive()\n _LOGGER.debug(\"Received: %s\", result)\n return result\n except socket.error as error:\n _LOGGER.error(\"Error sending command: %s\", error)\n self.connect()\n return \"\"", - "docstring": "Send TCP command to hub and return response." - }, - { - "code": "def initial_sanity_check(self):\r\n self.try_import(self.project_name)\r\n self.validate_name(self.project_name)\r\n if os.path.exists(self.project_name):\r\n print(\"Directory {} already exist. Aborting.\".format(self.project_name))\r\n return False\r\n if os.path.exists('manage.py'):\r\n print(\"A manage.py file already exist in the current directory. 
Aborting.\")\r\n return False\r\n return True", - "docstring": "Checks if we can create the project" - }, - { - "code": "def resolve_ambigs(tmpseq):\n for ambig in np.uint8([82, 83, 75, 87, 89, 77]):\n idx, idy = np.where(tmpseq == ambig)\n res1, res2 = AMBIGS[ambig.view(\"S1\")]\n halfmask = np.random.choice([True, False], idx.shape[0])\n for i in xrange(halfmask.shape[0]):\n if halfmask[i]:\n tmpseq[idx[i], idy[i]] = np.array(res1).view(np.uint8)\n else:\n tmpseq[idx[i], idy[i]] = np.array(res2).view(np.uint8)\n return tmpseq", - "docstring": "returns a seq array with 'RSKYWM' randomly replaced with resolved bases" - }, - { - "code": "def retrieve(self, cursor):\n assert isinstance(cursor, dict), \"expected cursor type 'dict'\"\n query = self.get_query()\n assert isinstance(query, peewee.Query)\n query\n return query.get(**cursor)", - "docstring": "Retrieve items from query" - }, - { - "code": "async def _playnow(self, ctx, *, query: str):\r\n player = self.bot.lavalink.players.get(ctx.guild.id)\r\n if not player.queue and not player.is_playing:\r\n return await ctx.invoke(self._play, query=query)\r\n query = query.strip('<>')\r\n if not url_rx.match(query):\r\n query = f'ytsearch:{query}'\r\n results = await self.bot.lavalink.get_tracks(query)\r\n if not results or not results['tracks']:\r\n return await ctx.send('Nothing found!')\r\n tracks = results['tracks']\r\n track = tracks.pop(0)\r\n if results['loadType'] == 'PLAYLIST_LOADED':\r\n for _track in tracks:\r\n player.add(requester=ctx.author.id, track=_track)\r\n await player.play_now(requester=ctx.author.id, track=track)", - "docstring": "Plays immediately a song." - }, - { - "code": "def merge_to_one_seq(infile, outfile, seqname='union'):\n seq_reader = sequences.file_reader(infile)\n seqs = []\n for seq in seq_reader:\n seqs.append(copy.copy(seq))\n new_seq = ''.join([seq.seq for seq in seqs])\n if type(seqs[0]) == sequences.Fastq:\n new_qual = ''.join([seq.qual for seq in seqs])\n seqs[:] = []\n merged = sequences.Fastq(seqname, new_seq, new_qual)\n else:\n merged = sequences.Fasta(seqname, new_seq)\n seqs[:] = []\n f = utils.open_file_write(outfile)\n print(merged, file=f)\n utils.close(f)", - "docstring": "Takes a multi fasta or fastq file and writes a new file that contains just one sequence, with the original sequences catted together, preserving their order" - }, - { - "code": "def handle_tls_connected_event(self, event):\n if self.settings[\"tls_verify_peer\"]:\n valid = self.settings[\"tls_verify_callback\"](event.stream,\n event.peer_certificate)\n if not valid:\n raise SSLError(\"Certificate verification failed\")\n event.stream.tls_established = True\n with event.stream.lock:\n event.stream._restart_stream()", - "docstring": "Verify the peer certificate on the `TLSConnectedEvent`." - }, - { - "code": "def create_disjunction_node(self, disjunction):\n node = BNode()\n type_triple = (node, RDF.type, self.spdx_namespace.DisjunctiveLicenseSet)\n self.graph.add(type_triple)\n licenses = self.licenses_from_tree(disjunction)\n for lic in licenses:\n member_triple = (node, self.spdx_namespace.member, lic)\n self.graph.add(member_triple)\n return node", - "docstring": "Return a node representing a disjunction of licenses." 
- }, - { - "code": "def reload_cache_config(self, call_params):\n path = '/' + self.api_version + '/ReloadCacheConfig/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Reload Plivo Cache Config helper" - }, - { - "code": "def getbalance(self, url='http://services.ambientmobile.co.za/credits'):\n postXMLList = []\n postXMLList.append(\"%s\" % self.api_key)\n postXMLList.append(\"%s\" % self.password)\n postXML = '%s' % \"\".join(postXMLList)\n result = self.curl(url, postXML)\n if result.get(\"credits\", None):\n return result[\"credits\"]\n else:\n raise AmbientSMSError(result[\"status\"])", - "docstring": "Get the number of credits remaining at AmbientSMS" - }, - { - "code": "def fetch_by_name(self, name):\n service = self.collection.find_one({'name': name})\n if not service:\n raise ServiceNotFound\n return Service(service)", - "docstring": "Gets service for given ``name`` from mongodb storage." - }, - { - "code": "def git_path_valid(git_path=None):\n if git_path is None and GIT_PATH is None:\n return False\n if git_path is None: git_path = GIT_PATH\n try:\n call([git_path, '--version'])\n return True\n except OSError:\n return False", - "docstring": "Check whether the git executable is found." - }, - { - "code": "def _flatten(iterable):\n for i in iterable:\n if isinstance(i, Iterable) and not isinstance(i, string_types):\n for sub_i in _flatten(i):\n yield sub_i\n else:\n yield i", - "docstring": "Given an iterable with nested iterables, generate a flat iterable" - }, - { - "code": "def __netjson_protocol(self, radio):\n htmode = radio.get('htmode')\n hwmode = radio.get('hwmode', None)\n if htmode.startswith('HT'):\n return '802.11n'\n elif htmode.startswith('VHT'):\n return '802.11ac'\n return '802.{0}'.format(hwmode)", - "docstring": "determines NetJSON protocol radio attribute" - }, - { - "code": "def add_edges(self):\n for group, edgelist in self.edges.items():\n for (u, v, d) in edgelist:\n self.draw_edge(u, v, d, group)", - "docstring": "Draws all of the edges in the graph." - }, - { - "code": "def reload_programs(self):\r\n print(\"Reloading programs:\")\r\n for name, program in self._programs.items():\r\n if getattr(program, 'program', None):\r\n print(\" - {}\".format(program.meta.label))\r\n program.program = resources.programs.load(program.meta)", - "docstring": "Reload all shader programs with the reloadable flag set" - }, - { - "code": "def create(self, server):\n if len(self.geometries) == 0:\n raise Exception('no geometries')\n return server.post(\n 'task_admin',\n self.as_payload(),\n replacements={\n 'slug': self.__challenge__.slug,\n 'identifier': self.identifier})", - "docstring": "Create the task on the server" - }, - { - "code": "def _get_init_args(self, skip=4):\n stack = inspect.stack()\n if len(stack) < skip + 1:\n raise ValueError(\"The length of the inspection stack is shorter than the requested start position.\")\n args, _, _, values = inspect.getargvalues(stack[skip][0])\n params = {}\n for arg in args:\n if values[arg] is not None and arg not in ['self', 'prev_layer', 'inputs']:\n val = values[arg]\n if inspect.isfunction(val):\n params[arg] = {\"module_path\": val.__module__, \"func_name\": val.__name__}\n elif arg.endswith('init'):\n continue\n else:\n params[arg] = val\n return params", - "docstring": "Get all arguments of current layer for saving the graph." 
- }, - { - "code": "def dump_grid(grid):\n header = 'ver:%s' % dump_str(str(grid._version), version=grid._version)\n if bool(grid.metadata):\n header += ' ' + dump_meta(grid.metadata, version=grid._version)\n columns = dump_columns(grid.column, version=grid._version)\n rows = dump_rows(grid)\n return '\\n'.join([header, columns] + rows + [''])", - "docstring": "Dump a single grid to its ZINC representation." - }, - { - "code": "def _save(self):\n if not os.path.exists(self.cache):\n os.makedirs(self.cache)\n path = os.path.join(self.cache, self.name + \".xml\")\n f = open(path, \"w\")\n f.write(self.xml)\n f.close()", - "docstring": "Saves the color information in the cache as XML." - }, - { - "code": "def module_resolver(resolver, path):\n if resolver.resolved:\n if isinstance(resolver.resolved[0], VirtualEnvironment):\n env = resolver.resolved[0]\n mod = env.get_module(path)\n if mod:\n return mod\n raise ResolveError", - "docstring": "Resolves module in previously resolved environment." - }, - { - "code": "def parse_ext_doc_ref(self, ext_doc_ref_term):\n for _s, _p, o in self.graph.triples(\n (ext_doc_ref_term,\n self.spdx_namespace['externalDocumentId'],\n None)):\n try:\n self.builder.set_ext_doc_id(self.doc, six.text_type(o))\n except SPDXValueError:\n self.value_error('EXT_DOC_REF_VALUE', 'External Document ID')\n break\n for _s, _p, o in self.graph.triples(\n (ext_doc_ref_term,\n self.spdx_namespace['spdxDocument'],\n None)):\n try:\n self.builder.set_spdx_doc_uri(self.doc, six.text_type(o))\n except SPDXValueError:\n self.value_error('EXT_DOC_REF_VALUE', 'SPDX Document URI')\n break\n for _s, _p, checksum in self.graph.triples(\n (ext_doc_ref_term, self.spdx_namespace['checksum'], None)):\n for _, _, value in self.graph.triples(\n (checksum, self.spdx_namespace['checksumValue'], None)):\n try:\n self.builder.set_chksum(self.doc, six.text_type(value))\n except SPDXValueError:\n self.value_error('EXT_DOC_REF_VALUE', 'Checksum')\n break", - "docstring": "Parses the External Document ID, SPDX Document URI and Checksum." - }, - { - "code": "def imgmin(self):\n if not hasattr(self, '_imgmin'):\n imgmin = _np.min(self.images[0])\n for img in self.images:\n imin = _np.min(img)\n if imin > imgmin:\n imgmin = imin\n self._imgmin = imgmin\n return _np.min(self.image)", - "docstring": "Lowest value of input image." - }, - { - "code": "def reject_milestone_request(session, milestone_request_id):\n params_data = {\n 'action': 'reject',\n }\n endpoint = 'milestone_requests/{}'.format(milestone_request_id)\n response = make_put_request(session, endpoint, params_data=params_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['status']\n else:\n raise MilestoneRequestNotRejectedException(\n message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Reject a milestone request" - }, - { - "code": "def _default_value_only(self):\n line = self.source[self.col_offset:]\n regex = re.compile()\n match = regex.match(line)\n if not match:\n return ''\n return match.group(1)", - "docstring": "Return only the default value, if there is one." 
- }, - { - "code": "def handle_rereduce(self, reduce_function_names, values):\n reduce_functions = []\n for reduce_function_name in reduce_function_names:\n try:\n reduce_function = get_function(reduce_function_name)\n if getattr(reduce_function, 'view_decorated', None):\n reduce_function = reduce_function(self.log)\n reduce_functions.append(reduce_function)\n except Exception, exc:\n self.log(repr(exc))\n reduce_functions.append(lambda *args, **kwargs: None)\n results = []\n for reduce_function in reduce_functions:\n try:\n results.append(reduce_function(None, values, rereduce=True))\n except Exception, exc:\n self.log(repr(exc))\n results.append(None)\n return [True, results]", - "docstring": "Re-reduce a set of values, with a list of rereduction functions." - }, - { - "code": "def cp(resume, quiet, dataset_uri, dest_base_uri):\n _copy(resume, quiet, dataset_uri, dest_base_uri)", - "docstring": "Copy a dataset to a different location." - }, - { - "code": "def as_call(self):\n default = self._default()\n default = ', ' + default if default else ''\n return \"pyconfig.%s(%r%s)\" % (self.method, self.get_key(), default)", - "docstring": "Return this call as it is called in its source." - }, - { - "code": "def clustdealer(pairdealer, optim):\n ccnt = 0\n chunk = []\n while ccnt < optim:\n try:\n taker = itertools.takewhile(lambda x: x[0] != \"//\\n\", pairdealer)\n oneclust = [\"\".join(taker.next())]\n except StopIteration:\n return 1, chunk\n while 1:\n try:\n oneclust.append(\"\".join(taker.next()))\n except StopIteration:\n break\n chunk.append(\"\".join(oneclust))\n ccnt += 1\n return 0, chunk", - "docstring": "return optim clusters given iterators, and whether it got all or not" - }, - { - "code": "def get(self, guild_id):\r\n if guild_id not in self._players:\r\n p = self._player(lavalink=self.lavalink, guild_id=guild_id)\r\n self._players[guild_id] = p\r\n return self._players[guild_id]", - "docstring": "Returns a player from the cache, or creates one if it does not exist." - }, - { - "code": "def yaml_write_data(yaml_data, filename):\n with open(filename, 'w') as fd:\n yaml.dump(yaml_data, fd, default_flow_style=False)\n return True\n return False", - "docstring": "Write data into a .yml file" - }, - { - "code": "def _hashkey(self, method, url, **kwa):\n to_hash = ''.join([str(method), str(url),\n str(kwa.get('data', '')),\n str(kwa.get('params', ''))\n ])\n return hashlib.md5(to_hash.encode()).hexdigest()", - "docstring": "Find a hash value for the linear combination of invocation methods." - }, - { - "code": "def _hook_write_mem(self, uc, access, address, size, value, data):\n self._mem_delta[address] = (value, size)\n return True", - "docstring": "Captures memory written by Unicorn" - }, - { - "code": "def overview():\n range_search = RangeSearch()\n ranges = range_search.get_ranges()\n if ranges:\n formatted_ranges = []\n tags_lookup = {}\n for r in ranges:\n formatted_ranges.append({'mask': r.range})\n tags_lookup[r.range] = r.tags\n search = Host.search()\n search = search.filter('term', status='up')\n search.aggs.bucket('hosts', 'ip_range', field='address', ranges=formatted_ranges)\n response = search.execute()\n print_line(\"{0:<18} {1:<6} {2}\".format(\"Range\", \"Count\", \"Tags\"))\n print_line(\"-\" * 60)\n for entry in response.aggregations.hosts.buckets:\n print_line(\"{0:<18} {1:<6} {2}\".format(entry.key, entry.doc_count, tags_lookup[entry.key]))\n else:\n print_error(\"No ranges defined.\")", - "docstring": "Creates a overview of the hosts per range." 
- }, - { - "code": "def from_spec(spec, kwargs=None):\n distribution = util.get_object(\n obj=spec,\n predefined_objects=tensorforce.core.distributions.distributions,\n kwargs=kwargs\n )\n assert isinstance(distribution, Distribution)\n return distribution", - "docstring": "Creates a distribution from a specification dict." - }, - { - "code": "def _tile_coords(self, bounds):\n tfm = partial(pyproj.transform,\n pyproj.Proj(init=\"epsg:3857\"),\n pyproj.Proj(init=\"epsg:4326\"))\n bounds = ops.transform(tfm, box(*bounds)).bounds\n west, south, east, north = bounds\n epsilon = 1.0e-10\n if east != west and north != south:\n west += epsilon\n south += epsilon\n east -= epsilon\n north -= epsilon\n params = [west, south, east, north, [self.zoom_level]]\n tile_coords = [(tile.x, tile.y) for tile in mercantile.tiles(*params)]\n xtiles, ytiles = zip(*tile_coords)\n minx = min(xtiles)\n miny = min(ytiles)\n maxx = max(xtiles) \n maxy = max(ytiles)\n return minx, miny, maxx, maxy", - "docstring": "convert mercator bbox to tile index limits" - }, - { - "code": "def start_scan(self, timeout_sec=TIMEOUT_SEC):\n self._scan_started.clear()\n self._adapter.StartDiscovery()\n if not self._scan_started.wait(timeout_sec):\n raise RuntimeError('Exceeded timeout waiting for adapter to start scanning!')", - "docstring": "Start scanning for BLE devices with this adapter." - }, - { - "code": "def gce_list_aggregated(service=None, key_name='name', **kwargs):\n resp_list = []\n req = service.aggregatedList(**kwargs)\n while req is not None:\n resp = req.execute()\n for location, item in resp['items'].items():\n if key_name in item:\n resp_list.extend(item[key_name])\n req = service.aggregatedList_next(previous_request=req,\n previous_response=resp)\n return resp_list", - "docstring": "General aggregated list function for the GCE service." - }, - { - "code": "def dropbox_editor_factory(request):\n dropbox = dropbox_factory(request)\n if is_equal(dropbox.editor_token, request.matchdict['editor_token'].encode('utf-8')):\n return dropbox\n else:\n raise HTTPNotFound('invalid editor token')", - "docstring": "this factory also requires the editor token" - }, - { - "code": "def _build_pipeline_input_file_param(cls, var_name, docker_path):\n path, filename = os.path.split(docker_path)\n if '*' in filename:\n return cls._build_pipeline_file_param(var_name, path + '/')\n else:\n return cls._build_pipeline_file_param(var_name, docker_path)", - "docstring": "Return a dict object representing a pipeline input argument." - }, - { - "code": "def create(self, validated_data):\n ret = []\n for attrs in validated_data:\n if 'non_field_errors' not in attrs and not any(isinstance(attrs[field], list) for field in attrs):\n ret.append(self.child.create(attrs))\n else:\n ret.append(attrs)\n return ret", - "docstring": "This selectively calls the child create method based on whether or not validation failed for each payload." - }, - { - "code": "def transform_from_local(xp, yp, cphi, sphi, mx, my):\n x = xp * cphi - yp * sphi + mx\n y = xp * sphi + yp * cphi + my\n return (x,y)", - "docstring": "Transform from the local frame to absolute space." 
- }, - { - "code": "def die(msg, code=-1):\n sys.stderr.write(msg + \"\\n\")\n sys.exit(code)", - "docstring": "Writes msg to stderr and exits with return code" - }, - { - "code": "def parse_glob(pattern):\n if not pattern:\n return\n bits = pattern.split(\"/\")\n dirs, filename = bits[:-1], bits[-1]\n for dirname in dirs:\n if dirname == \"**\":\n yield \"(|.+/)\"\n else:\n yield glob2re(dirname) + \"/\"\n yield glob2re(filename)", - "docstring": "Generate parts of regex transformed from glob pattern." - }, - { - "code": "def addInstance(self, groundTruth, prediction, record = None, result = None):\n self.value = self.avg(prediction)", - "docstring": "Compute and store metric value" - }, - { - "code": "def variables(self):\n for table in self.tables:\n if isinstance(table, VariableTable):\n for statement in table.rows:\n if statement[0] != \"\":\n yield statement", - "docstring": "Generator which returns all of the statements in all of the variables tables" - }, - { - "code": "def capture(cmd, **kw):\n kw = kw.copy()\n kw['hide'] = 'out'\n if not kw.get('echo', False):\n kw['echo'] = False\n ignore_failures = kw.pop('ignore_failures', False)\n try:\n return invoke_run(cmd, **kw).stdout.strip()\n except exceptions.Failure as exc:\n if not ignore_failures:\n notify.error(\"Command `{}` failed with RC={}!\".format(cmd, exc.result.return_code,))\n raise", - "docstring": "Run a command and return its stripped captured output." - }, - { - "code": "def disassemble(self):\n ser_pb = open(self.input_file, 'rb').read()\n fd = FileDescriptorProto()\n fd.ParseFromString(ser_pb)\n self.name = fd.name\n self._print('// Reversed by pbd (https://github.com/rsc-dev/pbd)')\n self._print('syntax = \"proto2\";')\n self._print('')\n if len(fd.package) > 0:\n self._print('package {};'.format(fd.package))\n self.package = fd.package\n else:\n self._print('// Package not defined')\n self._walk(fd)", - "docstring": "Disassemble serialized protocol buffers file." - }, - { - "code": "def usage_palette(parser):\n parser.print_usage()\n print('')\n print('available palettes:')\n for palette in sorted(PALETTE):\n print(' %-12s' % (palette,))\n return 0", - "docstring": "Show usage and available palettes." - }, - { - "code": "def from_spec(spec, kwargs=None):\n optimizer = util.get_object(\n obj=spec,\n predefined_objects=tensorforce.core.optimizers.optimizers,\n kwargs=kwargs\n )\n assert isinstance(optimizer, Optimizer)\n return optimizer", - "docstring": "Creates an optimizer from a specification dict." - }, - { - "code": "def redirect_if_blocked(course_run_ids, user=None, ip_address=None, url=None):\n for course_run_id in course_run_ids:\n redirect_url = embargo_api.redirect_if_blocked(\n CourseKey.from_string(course_run_id),\n user=user,\n ip_address=ip_address,\n url=url\n )\n if redirect_url:\n return redirect_url", - "docstring": "Return redirect to embargo error page if the given user is blocked." - }, - { - "code": "def tf_import_demo_experience(self, states, internals, actions, terminal, reward):\n return self.demo_memory.store(\n states=states,\n internals=internals,\n actions=actions,\n terminal=terminal,\n reward=reward\n )", - "docstring": "Imports a single experience to memory." 
- }, - { - "code": "def using(context, alias):\n if alias == '':\n yield context\n else:\n try:\n widgets = context.render_context[WIDGET_CONTEXT_KEY]\n except KeyError:\n raise template.TemplateSyntaxError('No widget libraries loaded!')\n try:\n block_set = widgets[alias]\n except KeyError:\n raise template.TemplateSyntaxError('No widget library loaded for alias: %r' % alias)\n context.render_context.push()\n context.render_context[BLOCK_CONTEXT_KEY] = block_set\n context.render_context[WIDGET_CONTEXT_KEY] = widgets\n yield context\n context.render_context.pop()", - "docstring": "Temporarily update the context to use the BlockContext for the given alias." - }, - { - "code": "def clear_cache(module_name: str, keep_database: bool = True) -> None:\n data_dir = get_data_dir(module_name)\n if not os.path.exists(data_dir):\n return\n for name in os.listdir(data_dir):\n if name in {'config.ini', 'cfg.ini'}:\n continue\n if name == 'cache.db' and keep_database:\n continue\n path = os.path.join(data_dir, name)\n if os.path.isdir(path):\n shutil.rmtree(path)\n else:\n os.remove(path)\n os.rmdir(data_dir)", - "docstring": "Clear all downloaded files." - }, - { - "code": "def convert_aa_code(x):\n if len(x) == 1:\n return amino_acid_codes[x.upper()]\n elif len(x) == 3:\n return inverse_aa_codes[x.upper()]\n else:\n raise ValueError(\"Can only convert 1-letter or 3-letter amino acid codes, \"\n \"not %r\" % x)", - "docstring": "Converts between 3-letter and 1-letter amino acid codes." - }, - { - "code": "def _map_arg(arg):\n if isinstance(arg, _ast.Str):\n return repr(arg.s)\n elif isinstance(arg, _ast.Num):\n return arg.n\n elif isinstance(arg, _ast.Name):\n name = arg.id\n if name == 'True':\n return True\n elif name == 'False':\n return False\n elif name == 'None':\n return None\n return name\n else:\n return Unparseable()", - "docstring": "Return `arg` appropriately parsed or mapped to a usable value." - }, - { - "code": "def regex_find(pattern, content):\n find = re.findall(pattern, content)\n if not find:\n cij.err(\"pattern <%r> is invalid, no matches!\" % pattern)\n cij.err(\"content: %r\" % content)\n return ''\n if len(find) >= 2:\n cij.err(\"pattern <%r> is too simple, matched more than 2!\" % pattern)\n cij.err(\"content: %r\" % content)\n return ''\n return find[0]", - "docstring": "Find the given 'pattern' in 'content" - }, - { - "code": "def getAccountsFromPublicKey(self, pub):\n names = self.rpc.get_key_references([str(pub)])[0]\n for name in names:\n yield name", - "docstring": "Obtain all accounts associated with a public key" - }, - { - "code": "def _rindex(mylist: Sequence[T], x: T) -> int:\n return len(mylist) - mylist[::-1].index(x) - 1", - "docstring": "Index of the last occurrence of x in the sequence." 
- }, - { - "code": "def trun_exit(trun):\n if trun[\"conf\"][\"VERBOSE\"]:\n cij.emph(\"rnr:trun:exit\")\n rcode = 0\n for hook in reversed(trun[\"hooks\"][\"exit\"]):\n rcode = script_run(trun, hook)\n if rcode:\n break\n if trun[\"conf\"][\"VERBOSE\"]:\n cij.emph(\"rnr:trun::exit { rcode: %r }\" % rcode, rcode)\n return rcode", - "docstring": "Triggers when exiting the given testrun" - }, - { - "code": "def predict_peptides(self, peptides):\n from mhcflurry.encodable_sequences import EncodableSequences\n binding_predictions = []\n encodable_sequences = EncodableSequences.create(peptides)\n for allele in self.alleles:\n predictions_df = self.predictor.predict_to_dataframe(\n encodable_sequences, allele=allele)\n for (_, row) in predictions_df.iterrows():\n binding_prediction = BindingPrediction(\n allele=allele,\n peptide=row.peptide,\n affinity=row.prediction,\n percentile_rank=(\n row.prediction_percentile\n if 'prediction_percentile' in row else nan),\n prediction_method_name=\"mhcflurry\"\n )\n binding_predictions.append(binding_prediction)\n return BindingPredictionCollection(binding_predictions)", - "docstring": "Predict MHC affinity for peptides." - }, - { - "code": "def getInstanceJstack(self, topology_info, instance_id):\n pid_response = yield getInstancePid(topology_info, instance_id)\n try:\n http_client = tornado.httpclient.AsyncHTTPClient()\n pid_json = json.loads(pid_response)\n pid = pid_json['stdout'].strip()\n if pid == '':\n raise Exception('Failed to get pid')\n endpoint = utils.make_shell_endpoint(topology_info, instance_id)\n url = \"%s/jstack/%s\" % (endpoint, pid)\n response = yield http_client.fetch(url)\n Log.debug(\"HTTP call for url: %s\", url)\n raise tornado.gen.Return(response.body)\n except tornado.httpclient.HTTPError as e:\n raise Exception(str(e))", - "docstring": "Fetches Instance jstack from heron-shell." - }, - { - "code": "def _remove_timeout_handler(self, handler):\n if handler not in self.timeout_handlers:\n return\n self.timeout_handlers.remove(handler)\n for thread in self.timeout_threads:\n if thread.method.im_self is handler:\n thread.stop()", - "docstring": "Remove a TimeoutHandler from the pool." - }, - { - "code": "def age(self):\n if self.rounds == 1:\n self.do_run = False\n elif self.rounds > 1:\n self.rounds -= 1", - "docstring": "Get closer to your EOL" - }, - { - "code": "def write_temp_file_or_dryrun(content, *args, **kwargs):\n dryrun = get_dryrun(kwargs.get('dryrun'))\n if dryrun:\n fd, tmp_fn = tempfile.mkstemp()\n os.remove(tmp_fn)\n cmd_run = 'local'\n cmd = 'cat <> %s\\n%s\\nEOT' % (tmp_fn, content)\n if BURLAP_COMMAND_PREFIX:\n print('%s %s: %s' % (render_command_prefix(), cmd_run, cmd))\n else:\n print(cmd)\n else:\n fd, tmp_fn = tempfile.mkstemp()\n fout = open(tmp_fn, 'w')\n fout.write(content)\n fout.close()\n return tmp_fn", - "docstring": "Writes the given content to a local temporary file." - }, - { - "code": "def home_resolver(resolver, path):\n from .api import get_home_path\n path = unipath(get_home_path(), path)\n if is_environment(path):\n return VirtualEnvironment(path)\n raise ResolveError", - "docstring": "Resolves VirtualEnvironments in CPENV_HOME" - }, - { - "code": "def absolute_signal_to_noise_map(self):\n return np.divide(np.abs(self.image), self.noise_map)", - "docstring": "The estimated absolute_signal-to-noise_maps mappers of the image." 
- }, - { - "code": "def belanno(keyword: str, file: TextIO):\n directory = get_data_dir(keyword)\n obo_url = f'http://purl.obolibrary.org/obo/{keyword}.obo'\n obo_path = os.path.join(directory, f'{keyword}.obo')\n obo_cache_path = os.path.join(directory, f'{keyword}.obo.pickle')\n obo_getter = make_obo_getter(obo_url, obo_path, preparsed_path=obo_cache_path)\n graph = obo_getter()\n convert_obo_graph_to_belanno(\n graph,\n file=file,\n )", - "docstring": "Write as a BEL annotation." - }, - { - "code": "def rgb_to_ansi16(r, g, b, use_bright=False):\n ansi_b = round(b / 255.0) << 2\n ansi_g = round(g / 255.0) << 1\n ansi_r = round(r / 255.0)\n ansi = (90 if use_bright else 30) + (ansi_b | ansi_g | ansi_r)\n return ansi", - "docstring": "Convert RGB to ANSI 16 color" - }, - { - "code": "def islast(generator):\n next_x = None\n first = True\n for x in generator:\n if not first:\n yield (next_x, False)\n next_x = x\n first = False\n if not first:\n yield (next_x, True)", - "docstring": "indicate whether the current item is the last one in a generator" - }, - { - "code": "def _call_structure(mname, ename, sname, name, workdir, seed, ntaxa, nsites, kpop, rep):\n outname = os.path.join(workdir, \"{}-K-{}-rep-{}\".format(name, kpop, rep))\n cmd = [\"structure\", \n \"-m\", mname, \n \"-e\", ename, \n \"-K\", str(kpop),\n \"-D\", str(seed), \n \"-N\", str(ntaxa), \n \"-L\", str(nsites),\n \"-i\", sname, \n \"-o\", outname]\n proc = subprocess.Popen(cmd,\n stdout=subprocess.PIPE, \n stderr=subprocess.STDOUT)\n comm = proc.communicate()\n oldfiles = [mname, ename, sname]\n for oldfile in oldfiles:\n if os.path.exists(oldfile):\n os.remove(oldfile)\n return comm", - "docstring": "make the subprocess call to structure" - }, - { - "code": "def from_children(cls, program_uuid, *children):\n if not children or any(child is None for child in children):\n return None\n granted = all((child.granted for child in children))\n exists = any((child.exists for child in children))\n usernames = set([child.username for child in children])\n enterprises = set([child.enterprise_customer for child in children])\n if not len(usernames) == len(enterprises) == 1:\n raise InvalidProxyConsent(\n 'Children used to create a bulk proxy consent object must '\n 'share a single common username and EnterpriseCustomer.'\n )\n username = children[0].username\n enterprise_customer = children[0].enterprise_customer\n return cls(\n enterprise_customer=enterprise_customer,\n username=username,\n program_uuid=program_uuid,\n exists=exists,\n granted=granted,\n child_consents=children\n )", - "docstring": "Build a ProxyDataSharingConsent using the details of the received consent records." 
- }, - { - "code": "def mosaic_coverages(self, store):\n params = dict()\n url = build_url(\n self.service_url,\n [\n \"workspaces\",\n store.workspace.name,\n \"coveragestores\",\n store.name,\n \"coverages.json\"\n ],\n params\n )\n headers = {\n \"Content-type\": \"application/json\",\n \"Accept\": \"application/json\"\n }\n resp = self.http_request(url, headers=headers)\n if resp.status_code != 200:\n FailedRequestError('Failed to get mosaic coverages {} : {}, {}'.format(store, resp.status_code, resp.text))\n self._cache.clear()\n return resp.json()", - "docstring": "Returns all coverages in a coverage store" - }, - { - "code": "def _get_drug_target_interactions(manager: Optional['bio2bel_drugbank.manager'] = None) -> Mapping[str, List[str]]:\n if manager is None:\n import bio2bel_drugbank\n manager = bio2bel_drugbank.Manager()\n if not manager.is_populated():\n manager.populate()\n return manager.get_drug_to_hgnc_symbols()", - "docstring": "Get a mapping from drugs to their list of gene." - }, - { - "code": "def convert_mode(image, mode='RGB'):\n deprecated.deprecated('util.gif.convert_model')\n return image if (image.mode == mode) else image.convert(mode=mode)", - "docstring": "Return an image in the given mode." - }, - { - "code": "def moment(p, v, order=1):\n if order == 1:\n return (v*p).sum()\n elif order == 2:\n return np.sqrt( ((v**2)*p).sum() - (v*p).sum()**2 )", - "docstring": "Calculates the moments of the probability distribution p with vector v" - }, - { - "code": "def EXP_gas(self, base, exponent):\n EXP_SUPPLEMENTAL_GAS = 10\n def nbytes(e):\n result = 0\n for i in range(32):\n result = Operators.ITEBV(512, Operators.EXTRACT(e, i * 8, 8) != 0, i + 1, result)\n return result\n return EXP_SUPPLEMENTAL_GAS * nbytes(exponent)", - "docstring": "Calculate extra gas fee" - }, - { - "code": "def not_found(url, wait=10):\n try: connection = open(url, wait)\n except HTTP404NotFound:\n return True\n except:\n return False\n return False", - "docstring": "Returns True when the url generates a \"404 Not Found\" error." - }, - { - "code": "def run_elective(self, cmd, *args, **kwargs):\n if self._commit:\n return self.run(cmd, *args, **kwargs)\n else:\n notify.warning(\"WOULD RUN: {}\".format(cmd))\n kwargs = kwargs.copy()\n kwargs['echo'] = False\n return self.run('true', *args, **kwargs)", - "docstring": "Run a command, or just echo it, depending on `commit`." - }, - { - "code": "def subtree(events):\n stack = 0\n for obj in events:\n if obj['type'] == ENTER:\n stack += 1\n elif obj['type'] == EXIT:\n if stack == 0:\n break\n stack -= 1\n yield obj", - "docstring": "selects sub-tree events" - }, - { - "code": "def activate(self):\n\t\tobj = self.find_paypal_object()\n\t\tif obj.state == enums.BillingPlanState.CREATED:\n\t\t\tsuccess = obj.activate()\n\t\t\tif not success:\n\t\t\t\traise PaypalApiError(\"Failed to activate plan: %r\" % (obj.error))\n\t\tself.get_or_update_from_api_data(obj, always_sync=True)\n\t\treturn obj", - "docstring": "Activate an plan in a CREATED state." - }, - { - "code": "def local_renderer(self):\n if not self._local_renderer:\n r = self.create_local_renderer()\n self._local_renderer = r\n return self._local_renderer", - "docstring": "Retrieves the cached local renderer." - }, - { - "code": "def copy(self, graph):\n g = styleguide(graph)\n g.order = self.order\n dict.__init__(g, [(k, v) for k, v in self.iteritems()])\n return g", - "docstring": "Returns a copy of the styleguide for the given graph." 
- }, - { - "code": "def _is_redundant(self, matrix, cutoff=None):\n cutoff = 1.0 - self.feasibility_tol\n extra_col = matrix[:, 0] + 1\n extra_col[matrix.sum(axis=1) == 0] = 2\n corr = np.corrcoef(np.c_[matrix, extra_col])\n corr = np.tril(corr, -1)\n return (np.abs(corr) > cutoff).any(axis=1)", - "docstring": "Identify rdeundant rows in a matrix that can be removed." - }, - { - "code": "def num_tagitems(self, tag):\n query = \"/{t}/{u}/tags/{ta}/items\".format(\n u=self.library_id, t=self.library_type, ta=tag\n )\n return self._totals(query)", - "docstring": "Return the total number of items for the specified tag" - }, - { - "code": "def emit(self, op_code, *args):\n self.tape.append(OP_CODES[op_code](*args))", - "docstring": "Adds op_code with specified args to tape" - }, - { - "code": "def _translate_hits(es_response):\n def translate_result(result):\n translated_result = copy.copy(result)\n data = translated_result.pop(\"_source\")\n translated_result.update({\n \"data\": data,\n \"score\": translated_result[\"_score\"]\n })\n return translated_result\n def translate_facet(result):\n terms = {term[\"term\"]: term[\"count\"] for term in result[\"terms\"]}\n return {\n \"terms\": terms,\n \"total\": result[\"total\"],\n \"other\": result[\"other\"],\n }\n results = [translate_result(hit) for hit in es_response[\"hits\"][\"hits\"]]\n response = {\n \"took\": es_response[\"took\"],\n \"total\": es_response[\"hits\"][\"total\"],\n \"max_score\": es_response[\"hits\"][\"max_score\"],\n \"results\": results,\n }\n if \"facets\" in es_response:\n response[\"facets\"] = {facet: translate_facet(es_response[\"facets\"][facet]) for facet in es_response[\"facets\"]}\n return response", - "docstring": "Provide resultset in our desired format from elasticsearch results" - }, - { - "code": "def getServerInfo(pbclient=None, dc_id=None):\n if pbclient is None:\n raise ValueError(\"argument 'pbclient' must not be None\")\n if dc_id is None:\n raise ValueError(\"argument 'dc_id' must not be None\")\n server_info = []\n servers = pbclient.list_servers(dc_id, 1)\n for server in servers['items']:\n props = server['properties']\n info = dict(id=server['id'], name=props['name'],\n state=server['metadata']['state'],\n vmstate=props['vmState'])\n server_info.append(info)\n return server_info", - "docstring": "gets info of servers of a data center" - }, - { - "code": "def purge_keys(self):\n r = self.local_renderer\n r.env.default_ip = self.hostname_to_ip(self.env.default_hostname)\n r.env.home_dir = '/home/%s' % getpass.getuser()\n r.local('ssh-keygen -f \"{home_dir}/.ssh/known_hosts\" -R {host_string}')\n if self.env.default_hostname:\n r.local('ssh-keygen -f \"{home_dir}/.ssh/known_hosts\" -R {default_hostname}')\n if r.env.default_ip:\n r.local('ssh-keygen -f \"{home_dir}/.ssh/known_hosts\" -R {default_ip}')", - "docstring": "Deletes all SSH keys on the localhost associated with the current remote host." - }, - { - "code": "def whoami():\n try:\n user = PolyaxonClient().auth.get_user()\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not load user info.')\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n click.echo(\"\\nUsername: {username}, Email: {email}\\n\".format(**user.to_dict()))", - "docstring": "Show current logged Polyaxon user." 
- }, - { - "code": "def connect(self):\n with self.lock:\n if self.stream:\n logger.debug(\"Closing the previously used stream.\")\n self._close_stream()\n transport = TCPTransport(self.settings)\n addr = self.settings[\"server\"]\n if addr:\n service = None\n else:\n addr = self.jid.domain\n service = self.settings[\"c2s_service\"]\n transport.connect(addr, self.settings[\"c2s_port\"], service)\n handlers = self._base_handlers[:]\n handlers += self.handlers + [self]\n self.clear_response_handlers()\n self.setup_stanza_handlers(handlers, \"pre-auth\")\n stream = ClientStream(self.jid, self, handlers, self.settings)\n stream.initiate(transport)\n self.main_loop.add_handler(transport)\n self.main_loop.add_handler(stream)\n self._ml_handlers += [transport, stream]\n self.stream = stream\n self.uplink = stream", - "docstring": "Schedule a new XMPP c2s connection." - }, - { - "code": "def add_edge(self, n1_label, n2_label,directed=False):\n n1 = self.add_node(n1_label)\n n2 = self.add_node(n2_label)\n e = Edge(n1, n2, directed)\n self._edges.append(e)\n return e", - "docstring": "Get or create edges using get_or_create_node" - }, - { - "code": "def shuffle_cols(seqarr, newarr, cols):\n for idx in xrange(cols.shape[0]):\n newarr[:, idx] = seqarr[:, cols[idx]]\n return newarr", - "docstring": "used in bootstrap resampling without a map file" - }, - { - "code": "def keywords(self):\n for table in self.tables:\n if isinstance(table, KeywordTable):\n for keyword in table.keywords:\n yield keyword", - "docstring": "Generator which returns all keywords in the suite" - }, - { - "code": "def update_received_packet(self, received_pkt_size_bytes):\n self.update_count(self.RECEIVED_PKT_COUNT)\n self.update_count(self.RECEIVED_PKT_SIZE, incr_by=received_pkt_size_bytes)", - "docstring": "Update received packet metrics" - }, - { - "code": "def pods(self):\n if not self.xml_tree:\n return []\n return [Pod(elem) for elem in self.xml_tree.findall('pod')]", - "docstring": "Return list of all Pod objects in result" - }, - { - "code": "def parse_scalar(scalar_data, version):\n try:\n return hs_scalar[version].parseString(scalar_data, parseAll=True)[0]\n except pp.ParseException as pe:\n raise ZincParseException(\n 'Failed to parse scalar: %s' % reformat_exception(pe),\n scalar_data, 1, pe.col)\n except:\n LOG.debug('Failing scalar data: %r (version %r)',\n scalar_data, version)", - "docstring": "Parse a Project Haystack scalar in ZINC format." - }, - { - "code": "def clean(ctx):\n ctx.run(f'python setup.py clean')\n dist = ROOT.joinpath('dist')\n print(f'removing {dist}')\n shutil.rmtree(str(dist))", - "docstring": "Clean previously built package artifacts." - }, - { - "code": "def start_scan(self, scan_id):\n requests.post(self.url + 'scans/{}/launch'.format(scan_id), verify=False, headers=self.headers)", - "docstring": "Starts the scan identified by the scan_id.s" - }, - { - "code": "def _get_objects_by_path(self, paths):\n return map(lambda x: self._bus.get_object('org.bluez', x), paths)", - "docstring": "Return a list of all bluez DBus objects from the provided list of paths." - }, - { - "code": "def auth_finish(self, _unused):\n self.lock.acquire()\n try:\n self.__logger.debug(\"Authenticated\")\n self.authenticated=True\n self.state_change(\"authorized\",self.my_jid)\n self._post_auth()\n finally:\n self.lock.release()", - "docstring": "Handle success of the legacy authentication." 
- }, - { - "code": "def event_choices(events):\n if events is None:\n msg = \"Please add some events in settings.WEBHOOK_EVENTS.\"\n raise ImproperlyConfigured(msg)\n try:\n choices = [(x, x) for x in events]\n except TypeError:\n msg = \"settings.WEBHOOK_EVENTS must be an iterable object.\"\n raise ImproperlyConfigured(msg)\n return choices", - "docstring": "Get the possible events from settings" - }, - { - "code": "def manifest_filename(self):\n r = self.local_renderer\n tp_fn = r.format(r.env.data_dir + '/manifest.yaml')\n return tp_fn", - "docstring": "Returns the path to the manifest file." - }, - { - "code": "def _xml(self, root):\n element = root.createElement(self.name)\n keys = self.attrs.keys()\n keys.sort()\n for a in keys:\n element.setAttribute(a, self.attrs[a])\n if self.body:\n text = root.createTextNode(self.body)\n element.appendChild(text)\n for c in self.elements:\n element.appendChild(c._xml(root))\n return element", - "docstring": "Return an XML element representing this element" - }, - { - "code": "def add_systemnumber(self, source, recid=None):\n if not recid:\n recid = self.get_recid()\n if not self.hidden and recid:\n record_add_field(\n self.record,\n tag='035',\n subfields=[('9', source), ('a', recid)]\n )", - "docstring": "Add 035 number from 001 recid with given source." - }, - { - "code": "def add_example(self, example):\n \"Add an example to the list of examples, checking it first.\"\n self.check_example(example)\n self.examples.append(example)", - "docstring": "Add an example to the list of examples, checking it first." - }, - { - "code": "def MetaOrdered(parallel, done, turnstile):\n class Ordered:\n def __init__(self, iterref):\n if parallel.master:\n done[...] = 0\n self.iterref = iterref\n parallel.barrier()\n @classmethod\n def abort(self):\n turnstile.release()\n def __enter__(self):\n while self.iterref != done:\n pass\n turnstile.acquire()\n return self\n def __exit__(self, *args):\n done[...] += 1\n turnstile.release()\n return Ordered", - "docstring": "meta class for Ordered construct." - }, - { - "code": "def dinner(self, message=\"Dinner is served\", shout: bool = False):\n return self.helper.output(message, shout)", - "docstring": "Say something in the evening" - }, - { - "code": "def num_collectionitems(self, collection):\n query = \"/{t}/{u}/collections/{c}/items\".format(\n u=self.library_id, t=self.library_type, c=collection.upper()\n )\n return self._totals(query)", - "docstring": "Return the total number of items in the specified collection" - }, - { - "code": "def remove_node(self, id):\n if self.has_key(id):\n n = self[id]\n self.nodes.remove(n)\n del self[id]\n for e in list(self.edges):\n if n in (e.node1, e.node2):\n if n in e.node1.links: \n e.node1.links.remove(n)\n if n in e.node2.links: \n e.node2.links.remove(n)\n self.edges.remove(e)", - "docstring": "Remove node with given id." 
- }, - { - "code": "def twosided_2_centerdc(data):\n N = len(data)\n newpsd = np.concatenate((cshift(data[N//2:], 1), data[0:N//2]))\n newpsd[0] = data[-1]\n return newpsd", - "docstring": "Convert a two-sided PSD to a center-dc PSD" - }, - { - "code": "def load_document(self, id):\n fields = self.redis.hgetall(id)\n if six.PY3:\n f2 = {to_string(k): to_string(v) for k, v in fields.items()}\n fields = f2\n try:\n del fields['id']\n except KeyError:\n pass\n return Document(id=id, **fields)", - "docstring": "Load a single document by id" - }, - { - "code": "def contains_duplicates(self):\n try:\n if len(self.val) != len(set(self.val)):\n return self\n except TypeError:\n raise TypeError('val is not iterable')\n self._err('Expected <%s> to contain duplicates, but did not.' % self.val)", - "docstring": "Asserts that val is iterable and contains duplicate items." - }, - { - "code": "def _import_mapping(mapping, original=None):\n for key, value in list(mapping.items()):\n if isinstance(key, string_types):\n try:\n cls = import_item(key)\n except Exception:\n if original and key not in original:\n print(\"ERROR: canning class not importable: %r\", key, exc_info=True)\n mapping.pop(key)\n else:\n mapping[cls] = mapping.pop(key)", - "docstring": "Import any string-keys in a type mapping." - }, - { - "code": "def configure_custom(self, config):\n c = config.pop('()')\n if not hasattr(c, '__call__') and \\\n hasattr(types, 'ClassType') and isinstance(c, types.ClassType):\n c = self.resolve(c)\n props = config.pop('.', None)\n kwargs = dict((k, config[k]) for k in config if valid_ident(k))\n result = c(**kwargs)\n if props:\n for name, value in props.items():\n setattr(result, name, value)\n return result", - "docstring": "Configure an object with a user-supplied factory." - }, - { - "code": "def convert_input(X):\n if not isinstance(X, pd.DataFrame):\n if isinstance(X, list):\n X = pd.DataFrame(X)\n elif isinstance(X, (np.generic, np.ndarray)):\n X = pd.DataFrame(X)\n elif isinstance(X, csr_matrix):\n X = pd.DataFrame(X.todense())\n elif isinstance(X, pd.Series):\n X = pd.DataFrame(X)\n else:\n raise ValueError('Unexpected input type: %s' % (str(type(X))))\n X = X.apply(lambda x: pd.to_numeric(x, errors='ignore'))\n return X", - "docstring": "Unite data into a DataFrame." - }, - { - "code": "def get(self, request, *args, **kwargs):\n formset = self.construct_formset()\n return self.render_to_response(self.get_context_data(formset=formset))", - "docstring": "Handles GET requests and instantiates a blank version of the formset." 
- }, - { - "code": "def read(self, path):\n with open(path, \"rb\") as fout:\n memmove(self.m_buf, fout.read(self.m_size), self.m_size)", - "docstring": "Read file to buffer" - }, - { - "code": "def scale_out(self, blocks=1, block_size=1):\n self.config['sites.jetstream.{0}'.format(self.pool)]['flavor']\n count = 0\n if blocks == 1:\n block_id = len(self.blocks)\n self.blocks[block_id] = []\n for instance_id in range(0, block_size):\n instances = self.server_manager.create(\n 'parsl-{0}-{1}'.format(block_id, instance_id),\n self.client.images.get('87e08a17-eae2-4ce4-9051-c561d9a54bde'),\n self.client.flavors.list()[0],\n min_count=1,\n max_count=1,\n userdata=setup_script.format(engine_config=self.engine_config),\n key_name='TG-MCB090174-api-key',\n security_groups=['global-ssh'],\n nics=[{\n \"net-id\": '724a50cf-7f11-4b3b-a884-cd7e6850e39e',\n \"net-name\": 'PARSL-priv-net',\n \"v4-fixed-ip\": ''\n }])\n self.blocks[block_id].extend([instances])\n count += 1\n return count", - "docstring": "Scale out the existing resources." - }, - { - "code": "def regular_to_pix(self):\n return mapper_util.voronoi_regular_to_pix_from_grids_and_geometry(regular_grid=self.grid_stack.regular,\n regular_to_nearest_pix=self.grid_stack.pix.regular_to_nearest_pix,\n pixel_centres=self.geometry.pixel_centres, pixel_neighbors=self.geometry.pixel_neighbors,\n pixel_neighbors_size=self.geometry.pixel_neighbors_size).astype('int')", - "docstring": "The 1D index mappings between the regular pixels and Voronoi pixelization pixels." - }, - { - "code": "def create(self):\n input_params = {\n \"name\": self.name,\n \"public_key\": self.public_key,\n }\n data = self.get_data(\"account/keys/\", type=POST, params=input_params)\n if data:\n self.id = data['ssh_key']['id']", - "docstring": "Create the SSH Key" - }, - { - "code": "def upload(self, docs_base, release):\n return getattr(self, '_to_' + self.target)(docs_base, release)", - "docstring": "Upload docs in ``docs_base`` to the target of this uploader." - }, - { - "code": "def cli(id):\n ch = Analyse(id)\n ch.full_analysis()\n click.echo(\n 'Created: %s. Modified: %s. Deleted: %s' % (ch.create, ch.modify, ch.delete)\n )\n if ch.is_suspect:\n click.echo('The changeset {} is suspect! Reasons: {}'.format(\n id,\n ', '.join(ch.suspicion_reasons)\n ))\n else:\n click.echo('The changeset %s is not suspect!' % id)", - "docstring": "Analyse an OpenStreetMap changeset." - }, - { - "code": "def _fix_type(value):\n if isinstance(value, string_types):\n return str(value)\n if isinstance(value, float_):\n return float(value)\n if isinstance(value, bool_):\n return bool(value)\n if isinstance(value, set):\n return list(value)\n if isinstance(value, dict):\n return OrderedDict((key, value[key]) for key in sorted(value))\n if value.__class__.__name__ == \"Formula\":\n return str(value)\n if value is None:\n return \"\"\n return value", - "docstring": "convert possible types to str, float, and bool" - }, - { - "code": "def _place_row(self, row, position):\n self._rows_in_grid[row] = RowInGrid(row, position)", - "docstring": "place the instruction on a grid" - }, - { - "code": "def render_toolbar(context, config):\n quill_config = getattr(quill_app, config)\n t = template.loader.get_template(quill_config['toolbar_template'])\n return t.render(context)", - "docstring": "Render the toolbar for the given config." 
- }, - { - "code": "def verify_editor(self):\n powerful_editors = [\n 'josm', 'level0', 'merkaartor', 'qgis', 'arcgis', 'upload.py',\n 'osmapi', 'Services_OpenStreetMap'\n ]\n if self.editor is not None:\n for editor in powerful_editors:\n if editor in self.editor.lower():\n self.powerfull_editor = True\n break\n if 'iD' in self.editor:\n trusted_hosts = [\n 'www.openstreetmap.org/id',\n 'www.openstreetmap.org/edit',\n 'improveosm.org',\n 'strava.github.io/iD',\n 'preview.ideditor.com/release',\n 'preview.ideditor.com/master',\n 'hey.mapbox.com/iD-internal',\n 'projets.pavie.info/id-indoor',\n 'maps.mapcat.com/edit',\n 'id.softek.ir'\n ]\n if self.host.split('://')[-1].strip('/') not in trusted_hosts:\n self.label_suspicious('Unknown iD instance')\n else:\n self.powerfull_editor = True\n self.label_suspicious('Software editor was not declared')", - "docstring": "Verify if the software used in the changeset is a powerfull_editor." - }, - { - "code": "def _read_config(self, filename=None):\n if filename:\n self._config_filename = filename\n else:\n try:\n import appdirs\n except ImportError:\n raise Exception(\"Missing dependency for determining config path. Please install \"\n \"the 'appdirs' Python module.\")\n self._config_filename = appdirs.user_config_dir(_LIBRARY_NAME, \"ProfitBricks\") + \".ini\"\n if not self._config:\n self._config = configparser.ConfigParser()\n self._config.optionxform = str\n self._config.read(self._config_filename)", - "docstring": "Read the user configuration" - }, - { - "code": "def _rename(self, name, callback):\n self._coroutine_queue.put(self._conversation.rename(name))\n callback()", - "docstring": "Rename conversation and call callback." - }, - { - "code": "def query_admins_by_group_ids(cls, groups_ids=None):\n assert groups_ids is None or isinstance(groups_ids, list)\n query = db.session.query(\n Group.id, func.count(GroupAdmin.id)\n ).join(\n GroupAdmin\n ).group_by(\n Group.id\n )\n if groups_ids:\n query = query.filter(Group.id.in_(groups_ids))\n return query", - "docstring": "Get count of admins per group." - }, - { - "code": "def refresh(self):\n response = self.__raw = self._conn.get(self.url)\n self._populate_fields(**response)\n self._loaded = True", - "docstring": "Update the Server information and list of API Roots" - }, - { - "code": "def download(course, tid=None, dl_all=False, force=False, upgradejava=False,\n update=False):\n def dl(id):\n download_exercise(Exercise.get(Exercise.tid == id),\n force=force,\n update_java=upgradejava,\n update=update)\n if dl_all:\n for exercise in list(course.exercises):\n dl(exercise.tid)\n elif tid is not None:\n dl(int(tid))\n else:\n for exercise in list(course.exercises):\n if not exercise.is_completed:\n dl(exercise.tid)\n else:\n exercise.update_downloaded()", - "docstring": "Download the exercises from the server." 
- }, - { - "code": "def getOwnerKeyForAccount(self, name):\n account = self.rpc.get_account(name)\n for authority in account[\"owner\"][\"key_auths\"]:\n key = self.getPrivateKeyForPublicKey(authority[0])\n if key:\n return key\n raise KeyNotFound", - "docstring": "Obtain owner Private Key for an account from the wallet database" - }, - { - "code": "def getAccounts(self):\n pubkeys = self.getPublicKeys()\n accounts = []\n for pubkey in pubkeys:\n if pubkey[: len(self.prefix)] == self.prefix:\n accounts.extend(self.getAccountsFromPublicKey(pubkey))\n return accounts", - "docstring": "Return all accounts installed in the wallet database" - }, - { - "code": "def to_float(option,value):\n if type(value) is str:\n try:\n value=float(value)\n except ValueError:\n pass\n return (option,value)", - "docstring": "Converts string values to floats when appropriate" - }, - { - "code": "def _most_popular_gender(self, name, counter):\n if name not in self.names:\n return self.unknown_value\n max_count, max_tie = (0, 0)\n best = self.names[name].keys()[0]\n for gender, country_values in self.names[name].items():\n count, tie = counter(country_values)\n if count > max_count or (count == max_count and tie > max_tie):\n max_count, max_tie, best = count, tie, gender\n return best if max_count > 0 else self.unknown_value", - "docstring": "Finds the most popular gender for the given name counting by given counter" - }, - { - "code": "def once(self, event, listener):\n self.emit('new_listener', event, listener)\n self._once[event].append(listener)\n self._check_limit(event)\n return self", - "docstring": "Add a listener that is only called once." - }, - { - "code": "def write_numeric_array(fd, header, array):\n bd = BytesIO()\n write_var_header(bd, header)\n if not isinstance(array, basestring) and header['dims'][0] > 1:\n array = list(chain.from_iterable(izip(*array)))\n write_elements(bd, header['mtp'], array)\n data = bd.getvalue()\n bd.close()\n write_var_data(fd, data)", - "docstring": "Write the numeric array" - }, - { - "code": "def iter_char_block(self, text=None, width=60, fmtfunc=str):\n if width < 1:\n width = 1\n text = (self.text if text is None else text) or ''\n text = ' '.join(text.split('\\n'))\n escapecodes = get_codes(text)\n if not escapecodes:\n yield from (\n fmtfunc(text[i:i + width])\n for i in range(0, len(text), width)\n )\n else:\n blockwidth = 0\n block = []\n for i, s in enumerate(get_indices_list(text)):\n block.append(s)\n if len(s) == 1:\n blockwidth += 1\n if blockwidth == width:\n yield ''.join(block)\n block = []\n blockwidth = 0\n if block:\n yield ''.join(block)", - "docstring": "Format block by splitting on individual characters." - }, - { - "code": "def _close_stream(self):\n self.stream.close()\n if self.stream.transport in self._ml_handlers:\n self._ml_handlers.remove(self.stream.transport)\n self.main_loop.remove_handler(self.stream.transport)\n self.stream = None\n self.uplink = None", - "docstring": "Same as `close_stream` but with the `lock` acquired." 
- }, - { - "code": "def render_lights_debug(self, camera_matrix, projection):\n self.ctx.enable(moderngl.BLEND)\n self.ctx.blend_func = moderngl.SRC_ALPHA, moderngl.ONE_MINUS_SRC_ALPHA\n for light in self.point_lights:\n m_mv = matrix44.multiply(light.matrix, camera_matrix)\n light_size = light.radius\n self.debug_shader[\"m_proj\"].write(projection.tobytes())\n self.debug_shader[\"m_mv\"].write(m_mv.astype('f4').tobytes())\n self.debug_shader[\"size\"].value = light_size\n self.unit_cube.render(self.debug_shader, mode=moderngl.LINE_STRIP)\n self.ctx.disable(moderngl.BLEND)", - "docstring": "Render outlines of light volumes" - }, - { - "code": "def _walk(self):\n while self._todo:\n args = self._todo.pop(0)\n self._step(*args)", - "docstring": "Loop through all the instructions that are `_todo`." - }, - { - "code": "def proto_VC_50_MT_IV(abf=exampleABF):\n swhlab.memtest.memtest(abf)\n swhlab.memtest.checkSweep(abf)\n swhlab.plot.save(abf,tag='02-check',resize=False)\n av1,sd1=swhlab.plot.IV(abf,1.2,1.4,True,'b')\n swhlab.plot.save(abf,tag='iv')\n Xs=abf.clampValues(1.2)\n abf.saveThing([Xs,av1],'01_iv')", - "docstring": "combination of membrane test and IV steps." - }, - { - "code": "def invert(self):\r\n m = self.matrix\r\n d = m[0] * m[4] - m[1] * m[3]\r\n self.matrix = [\r\n m[4] / d, -m[1] / d, 0,\r\n -m[3] / d, m[0] / d, 0,\r\n (m[3] * m[7] - m[4] * m[6]) / d,\r\n -(m[0] * m[7] - m[1] * m[6]) / d,\r\n 1\r\n ]", - "docstring": "Multiplying a matrix by its inverse produces the identity matrix." - }, - { - "code": "def _authenticate(self):\n auth_url = BASE_URL + \"/auth/token\"\n payload = {'username': self.email, 'password': self.password, 'grant_type': 'password'}\n arequest = requests.post(auth_url, data=payload, headers=BASIC_HEADERS)\n status = arequest.status_code\n if status != 200:\n _LOGGER.error(\"Authentication request failed, please check credintials. \" + str(status))\n return False\n response = arequest.json()\n _LOGGER.debug(str(response))\n self.token = response.get(\"access_token\")\n self.refresh_token = response.get(\"refresh_token\")\n _auth = HEADERS.get(\"Authorization\")\n _auth = _auth % self.token\n HEADERS[\"Authorization\"] = _auth\n _LOGGER.info(\"Authentication was successful, token set.\")\n return True", - "docstring": "Authenticate with the API and return an authentication token." - }, - { - "code": "def full_like(array, value, dtype=None):\n shared = empty_like(array, dtype)\n shared[:] = value\n return shared", - "docstring": "Create a shared memory array with the same shape and type as a given array, filled with `value`." - }, - { - "code": "def items(self):\n query = \"SELECT {}, {} from {}\".format(\n self.__key__, self.__value__, self.__tablename__\n )\n connection = sqlite3.connect(self.sqlite_file)\n cursor = connection.cursor()\n cursor.execute(query)\n r = []\n for key, value in cursor.fetchall():\n r.append((key, value))\n return r", - "docstring": "returns all items off the store as tuples" - }, - { - "code": "def em_rates_from_E_DA_mix(em_rates_tot, E_values):\n em_rates_d, em_rates_a = [], []\n for em_rate_tot, E_value in zip(em_rates_tot, E_values):\n em_rate_di, em_rate_ai = em_rates_from_E_DA(em_rate_tot, E_value)\n em_rates_d.append(em_rate_di)\n em_rates_a.append(em_rate_ai)\n return em_rates_d, em_rates_a", - "docstring": "D and A emission rates for two populations." 
- }, - { - "code": "def many_until1(these, term):\n first = [these()]\n these_results, term_result = many_until(these, term)\n return (first + these_results, term_result)", - "docstring": "Like many_until but must consume at least one of these." - }, - { - "code": "def parse_args(parser, provider_required_args, argv):\n epilog = 'Provider-required arguments:\\n'\n for provider in provider_required_args:\n epilog += ' %s: %s\\n' % (provider, provider_required_args[provider])\n parser.epilog = epilog\n args = parser.parse_args(argv)\n for arg in provider_required_args[args.provider]:\n if not args.__getattribute__(arg):\n parser.error('argument --%s is required' % arg)\n return args", - "docstring": "Add provider required arguments epilog message, parse, and validate." - }, - { - "code": "def register(self, model):\n self.models[model._meta.table_name] = model\n model._meta.database = self.database\n return model", - "docstring": "Register a model in self." - }, - { - "code": "def unindent(self):\n if self.tab_always_indent:\n cursor = self.editor.textCursor()\n if not cursor.hasSelection():\n cursor.select(cursor.LineUnderCursor)\n self.unindent_selection(cursor)\n else:\n super(PyIndenterMode, self).unindent()", - "docstring": "Performs an un-indentation" - }, - { - "code": "def init_env():\n env.ROLES_DIR = ROLE_DIR\n env.services = []\n env.confirm_deployment = False\n env.is_local = None\n env.base_config_dir = '.'\n env.src_dir = 'src'\n env.sites = {}\n env[SITE] = None\n env[ROLE] = None\n env.hosts_retriever = None\n env.hosts_retrievers = type(env)()\n env.hostname_translator = 'default'\n env.hostname_translators = type(env)()\n env.hostname_translators.default = lambda hostname: hostname\n env.default_site = None\n env.available_sites = []\n env.available_sites_by_host = {}\n env.disk_usage_command = \"df -H | grep -vE '^Filesystem|tmpfs|cdrom|none' | awk '{print $5 \" \" $1}'\"\n env.burlap_data_dir = '.burlap'\n env.setdefault('roledefs', {})\n env.setdefault('roles', [])\n env.setdefault('hosts', [])\n env.setdefault('exclude_hosts', [])", - "docstring": "Populates the global env variables with custom default settings." - }, - { - "code": "def sum_out(self, var, bn):\n \"Make a factor eliminating var by summing over its values.\"\n vars = [X for X in self.vars if X != var]\n cpt = dict((event_values(e, vars),\n sum(self.p(extend(e, var, val))\n for val in bn.variable_values(var)))\n for e in all_events(vars, bn, {}))\n return Factor(vars, cpt)", - "docstring": "Make a factor eliminating var by summing over its values." - }, - { - "code": "def sort_seeds(uhandle, usort):\n cmd = [\"sort\", \"-k\", \"2\", uhandle, \"-o\", usort]\n proc = sps.Popen(cmd, close_fds=True)\n proc.communicate()", - "docstring": "sort seeds from cluster results" - }, - { - "code": "def ConstructObject(self, py_obj):\n obj = self.NewObject()\n for k, v in py_obj.items():\n obj.put(unicode(k), v)\n return obj", - "docstring": "note py_obj items are NOT converted to PyJs types!" - }, - { - "code": "def list_trilegal_filtersystems():\n print('%-40s %s' % ('FILTER SYSTEM NAME','DESCRIPTION'))\n print('%-40s %s' % ('------------------','-----------'))\n for key in sorted(TRILEGAL_FILTER_SYSTEMS.keys()):\n print('%-40s %s' % (key, TRILEGAL_FILTER_SYSTEMS[key]['desc']))", - "docstring": "This just lists all the filter systems available for TRILEGAL." 
- }, - { - "code": "def query(self, i, j):\n \"Query the oracle to find out whether i and j should be must-linked\"\n if self.queries_cnt < self.max_queries_cnt:\n self.queries_cnt += 1\n return self.labels[i] == self.labels[j]\n else:\n raise MaximumQueriesExceeded", - "docstring": "Query the oracle to find out whether i and j should be must-linked" - }, - { - "code": "def _add_user_from_conv_part(self, conv_part):\n user_ = User.from_conv_part_data(conv_part, self._self_user.id_)\n existing = self._user_dict.get(user_.id_)\n if existing is None:\n logger.warning('Adding fallback User with %s name \"%s\"',\n user_.name_type.name.lower(), user_.full_name)\n self._user_dict[user_.id_] = user_\n return user_\n else:\n existing.upgrade_name(user_)\n return existing", - "docstring": "Add or upgrade User from ConversationParticipantData." - }, - { - "code": "def _setVirtualEnv():\n try:\n activate = options.virtualenv.activate_cmd\n except AttributeError:\n activate = None\n if activate is None:\n virtualenv = path(os.environ.get('VIRTUAL_ENV', ''))\n if not virtualenv:\n virtualenv = options.paved.cwd\n else:\n virtualenv = path(virtualenv)\n activate = virtualenv / 'bin' / 'activate'\n if activate.exists():\n info('Using default virtualenv at %s' % activate)\n options.setdotted('virtualenv.activate_cmd', 'source %s' % activate)", - "docstring": "Attempt to set the virtualenv activate command, if it hasn't been specified." - }, - { - "code": "def indexesOptional(f):\n stack = inspect.stack()\n _NO_INDEX_CHECK_NEEDED.add('%s.%s.%s' % (f.__module__, stack[1][3], f.__name__))\n del stack\n return f", - "docstring": "Decorate test methods with this if you don't require strict index checking" - }, - { - "code": "def do_fullscreen(self, widget):\n self.fullscreen()\n self.is_fullscreen = True\n while Gtk.events_pending():\n Gtk.main_iteration()\n self.bot._screen_width = Gdk.Screen.width()\n self.bot._screen_height = Gdk.Screen.height()\n self.bot._screen_ratio = self.bot._screen_width / self.bot._screen_height", - "docstring": "Widget Action to Make the window fullscreen and update the bot." - }, - { - "code": "def _load_class(cls, d):\n for k, v in d.items():\n if isinstance(k, tuple):\n typ, k = k\n if typ == 'property':\n v = property(*v)\n elif typ == 'staticmethod':\n v = staticmethod(v)\n elif typ == 'classmethod':\n v = classmethod(v)\n setattr(cls, k, v)\n return cls", - "docstring": "Loads additional properties into class `cls`." 
- }, - { - "code": "def _build_dict_from_key_value(keys_and_values):\n key_dict = {}\n for key_value in keys_and_values:\n if '=' not in key_value:\n raise GhostError('Pair {0} is not of `key=value` format'.format(\n key_value))\n key, value = key_value.split('=', 1)\n key_dict.update({str(key): str(value)})\n return key_dict", - "docstring": "Return a dict from a list of key=value pairs" - }, - { - "code": "def _cleanup_and_die(data):\n tmpfiles = glob.glob(os.path.join(data.dirs.fastqs, \"tmp_*_R*.fastq\"))\n tmpfiles += glob.glob(os.path.join(data.dirs.fastqs, \"tmp_*.p\"))\n for tmpf in tmpfiles: \n os.remove(tmpf)", - "docstring": "cleanup func for step 1" - }, - { - "code": "def _get_localization_env(self, inputs, user_project):\n non_empty_inputs = [var for var in inputs if var.value]\n env = {'INPUT_COUNT': str(len(non_empty_inputs))}\n for idx, var in enumerate(non_empty_inputs):\n env['INPUT_{}'.format(idx)] = var.name\n env['INPUT_RECURSIVE_{}'.format(idx)] = str(int(var.recursive))\n env['INPUT_SRC_{}'.format(idx)] = var.value\n dst = os.path.join(providers_util.DATA_MOUNT_POINT, var.docker_path)\n path, filename = os.path.split(dst)\n if '*' in filename:\n dst = '{}/'.format(path)\n env['INPUT_DST_{}'.format(idx)] = dst\n env['USER_PROJECT'] = user_project\n return env", - "docstring": "Return a dict with variables for the 'localization' action." - }, - { - "code": "def sanitize(self, example):\n \"Return a copy of example, with non-input attributes replaced by None.\"\n return [attr_i if i in self.inputs else None\n for i, attr_i in enumerate(example)]", - "docstring": "Return a copy of example, with non-input attributes replaced by None." - }, - { - "code": "def filelist(self):\n if len(self._filelist) == 0:\n for item in self._data:\n if isinstance(self._data[item], filetree):\n self._filelist.extend(self._data[item].filelist())\n else:\n self._filelist.append(self._data[item])\n return self._filelist", - "docstring": "Return list of files in filetree." - }, - { - "code": "def upload_gif(gif):\n client_id = os.environ.get('IMGUR_API_ID')\n client_secret = os.environ.get('IMGUR_API_SECRET')\n if client_id is None or client_secret is None:\n click.echo('Cannot upload - could not find IMGUR_API_ID or IMGUR_API_SECRET environment variables')\n return\n client = ImgurClient(client_id, client_secret)\n click.echo('Uploading file {}'.format(click.format_filename(gif)))\n response = client.upload_from_path(gif)\n click.echo('File uploaded - see your gif at {}'.format(response['link']))", - "docstring": "Uploads an image file to Imgur" - }, - { - "code": "def _ConvertValueMessage(value, message):\n if isinstance(value, dict):\n _ConvertStructMessage(value, message.struct_value)\n elif isinstance(value, list):\n _ConvertListValueMessage(value, message.list_value)\n elif value is None:\n message.null_value = 0\n elif isinstance(value, bool):\n message.bool_value = value\n elif isinstance(value, six.string_types):\n message.string_value = value\n elif isinstance(value, _INT_OR_FLOAT):\n message.number_value = value\n else:\n raise ParseError('Unexpected type for Value message.')", - "docstring": "Convert a JSON representation into Value message." 
- }, - { - "code": "def do_play(self, line):\n if self.pause_speed is None:\n self.bot._speed = self.pause_speed\n self.pause_speed = None\n self.print_response(\"Play\")", - "docstring": "Resume playback if bot is paused" - }, - { - "code": "def check_java_home_set():\n if \"JAVA_HOME\" not in os.environ:\n Log.error(\"JAVA_HOME not set\")\n return False\n java_path = get_java_path()\n if os.path.isfile(java_path) and os.access(java_path, os.X_OK):\n return True\n Log.error(\"JAVA_HOME/bin/java either does not exist or not an executable\")\n return False", - "docstring": "Check if the java home set" - }, - { - "code": "def to_dict(self, include_meta=False):\n result = super(JackalDoc, self).to_dict(include_meta=include_meta)\n if include_meta:\n source = result.pop('_source')\n return {**result, **source}\n else:\n return result", - "docstring": "Returns the result as a dictionary, provide the include_meta flag to als show information like index and doctype." - }, - { - "code": "def _add_annotation_to_graph(self, graph: BELGraph) -> None:\n if 'bio2bel' not in graph.annotation_list:\n graph.annotation_list['bio2bel'] = set()\n graph.annotation_list['bio2bel'].add(self.module_name)", - "docstring": "Add this manager as an annotation to the graph." - }, - { - "code": "async def _find(self, ctx, *, query):\r\n if not query.startswith('ytsearch:') and not query.startswith('scsearch:'):\r\n query = 'ytsearch:' + query\r\n results = await self.bot.lavalink.get_tracks(query)\r\n if not results or not results['tracks']:\r\n return await ctx.send('Nothing found')\r\n tracks = results['tracks'][:10]\n o = ''\r\n for index, track in enumerate(tracks, start=1):\r\n track_title = track[\"info\"][\"title\"]\r\n track_uri = track[\"info\"][\"uri\"]\r\n o += f'`{index}.` [{track_title}]({track_uri})\\n'\r\n embed = discord.Embed(color=discord.Color.blurple(), description=o)\r\n await ctx.send(embed=embed)", - "docstring": "Lists the first 10 search results from a given query." - }, - { - "code": "def _open(self, archive):\n try:\n handle = unrarlib.RAROpenArchiveEx(ctypes.byref(archive))\n except unrarlib.UnrarException:\n raise BadRarFile(\"Invalid RAR file.\")\n return handle", - "docstring": "Open RAR archive file." - }, - { - "code": "def add_memory(request, slug):\n event = get_object_or_404(Event, slug=slug)\n form = MemoryForm(request.POST or None, request.FILES or None)\n if form.is_valid():\n instance = form.save(commit=False)\n instance.user = request.user\n instance.event = event\n instance.save()\n msg = \"Your thoughts were added. \"\n if request.FILES:\n photo_list = request.FILES.getlist('photos')\n photo_count = len(photo_list)\n for upload_file in photo_list:\n process_upload(upload_file, instance, form, event, request)\n if photo_count > 1:\n msg += \"{} images were added and should appear soon.\".format(photo_count)\n else:\n msg += \"{} image was added and should appear soon.\".format(photo_count)\n messages.success(request, msg)\n return HttpResponseRedirect('../')\n return render(request, 'happenings/add_memories.html', {'form': form, 'event': event})", - "docstring": "Adds a memory to an event." 
- }, - { - "code": "def _real_time_thread(self):\n while self.ws_client.connected():\n if self.die:\n break\n if self.pause:\n sleep(5)\n continue\n message = self.ws_client.receive()\n if message is None:\n break\n message_type = message['type']\n if message_type == 'error':\n continue\n if message['sequence'] <= self.sequence:\n continue\n if message_type == 'open':\n self._handle_open(message)\n elif message_type == 'match':\n self._handle_match(message)\n elif message_type == 'done':\n self._handle_done(message)\n elif message_type == 'change':\n self._handle_change(message)\n else:\n continue\n self.ws_client.disconnect()", - "docstring": "Handles real-time updates to the order book." - }, - { - "code": "def read_file(path, session=None):\r\n try:\r\n data = loadmat(path, struct_as_record=True)\r\n except UnicodeDecodeError as e:\r\n raise Oct2PyError(str(e))\r\n out = dict()\r\n for (key, value) in data.items():\r\n out[key] = _extract(value, session)\r\n return out", - "docstring": "Read the data from the given file path." - }, - { - "code": "def password(at_least=6, at_most=12, lowercase=True,\n uppercase=True, digits=True, spaces=False, punctuation=False):\n return text(at_least=at_least, at_most=at_most, lowercase=lowercase,\n uppercase=uppercase, digits=digits, spaces=spaces,\n punctuation=punctuation)", - "docstring": "Return a random string for use as a password." - }, - { - "code": "def iter_dict_differences(a, b):\n common_keys = set(a).union(b)\n for k in common_keys:\n a_value = a.get(k)\n b_value = b.get(k)\n if a_value != b_value:\n yield k, (a_value, b_value)", - "docstring": "Returns a generator yielding all the keys that have values that differ between each dictionary." - }, - { - "code": "def _get_bolt(self):\n bolt = topology_pb2.Bolt()\n bolt.comp.CopyFrom(self._get_base_component())\n self._add_in_streams(bolt)\n self._add_out_streams(bolt)\n return bolt", - "docstring": "Returns Bolt protobuf message" - }, - { - "code": "def _put_resource(self, url, body):\n headers = {\"Content-Type\": \"application/json\",\n \"Accept\": \"application/json\"}\n if self.token:\n headers[\"W-Token\"] = \"%s\" % self.token\n response = WhenIWork_DAO().putURL(url, headers, json.dumps(body))\n if not (response.status == 200 or response.status == 201 or\n response.status == 204):\n raise DataFailureException(url, response.status, response.data)\n return json.loads(response.data)", - "docstring": "When I Work PUT method." - }, - { - "code": "def removeAllRecords(self):\n for field in self.fields:\n field.encodings, field.values=[], []\n field.numRecords, field.numEncodings= (0, 0)", - "docstring": "Deletes all the values in the dataset" - }, - { - "code": "def save_file(self):\n with open(self.write_file, 'w') as out_nb:\n json.dump(self.work_notebook, out_nb, indent=2)", - "docstring": "Saves the notebook to a file" - }, - { - "code": "def validate_param_name(name, param_type):\n if not re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', name):\n raise ValueError('Invalid %s: %s' % (param_type, name))", - "docstring": "Validate that the name follows posix conventions for env variables." - }, - { - "code": "def argument_search(self):\n arguments, _ = self.argparser.parse_known_args()\n return self.search(**vars(arguments))", - "docstring": "Uses the command line arguments to fill the search function and call it." 
- }, - { - "code": "def getTotalw(self):\n w = sum([field.w for field in self.fields])\n return w", - "docstring": "Returns the cumulative w for all the fields in the dataset" - }, - { - "code": "def run(self):\n if self.args.roster_cache and os.path.exists(self.args.roster_cache):\n logging.info(u\"Loading roster from {0!r}\"\n .format(self.args.roster_cache))\n try:\n self.client.roster_client.load_roster(self.args.roster_cache)\n except (IOError, ValueError), err:\n logging.error(u\"Could not load the roster: {0!r}\".format(err))\n self.client.connect()\n self.client.run()", - "docstring": "Request client connection and start the main loop." - }, - { - "code": "def filter_bolts(table, header):\n bolts_info = []\n for row in table:\n if row[0] == 'bolt':\n bolts_info.append(row)\n return bolts_info, header", - "docstring": "filter to keep bolts" - }, - { - "code": "def _subsample(self):\n spans = self.maparr\n samp = np.zeros(spans.shape[0], dtype=np.uint64)\n for i in xrange(spans.shape[0]):\n samp[i] = np.random.randint(spans[i, 0], spans[i, 1], 1)\n return samp", - "docstring": "returns a subsample of unlinked snp sites" - }, - { - "code": "def _start(self):\n self._instruction_library = self._spec.new_default_instructions()\n self._as_instruction = self._instruction_library.as_instruction\n self._id_cache = {}\n self._pattern_set = None\n self._inheritance_todos = []\n self._instruction_todos = []", - "docstring": "Initialize the parsing process." - }, - { - "code": "def add_conversation_tab(self, conv_id, switch=False):\n conv_widget = self.get_conv_widget(conv_id)\n self._tabbed_window.set_tab(conv_widget, switch=switch,\n title=conv_widget.title)", - "docstring": "Add conversation tab if not present, and optionally switch to it." - }, - { - "code": "def new_noncomment(self, start_lineno, end_lineno):\n block = NonComment(start_lineno, end_lineno)\n self.blocks.append(block)\n self.current_block = block", - "docstring": "We are transitioning from a noncomment to a comment." - }, - { - "code": "def report(self, linenumber, filename, severity, message, rulename, char):\n if self._print_filename is not None:\n print(\"+ \" + self._print_filename)\n self._print_filename = None\n if severity in (WARNING, ERROR):\n self.counts[severity] += 1\n else:\n self.counts[\"other\"] += 1\n print(self.args.format.format(linenumber=linenumber, filename=filename,\n severity=severity, message=message.encode('utf-8'),\n rulename=rulename, char=char))", - "docstring": "Report a rule violation" - }, - { - "code": "def strip_ethernet(packet):\n if not isinstance(packet, Ethernet):\n packet = Ethernet(packet)\n payload = packet.payload\n return payload", - "docstring": "Strip the Ethernet frame from a packet." 
- }, - { - "code": "def lookup(cls, key, get=False):\n if get:\n item = cls._item_dict.get(key)\n return item.name if item else key\n return cls._item_dict[key].name", - "docstring": "Returns the label for a given Enum key" - }, - { - "code": "def _translate_string(self, data, length):\r\n for index, char in enumerate(data):\r\n if index == length:\r\n break\r\n yield self._meta.characters - 1 - self._ct[char]", - "docstring": "Translate string into character texture positions" - }, - { - "code": "def add(self, *args):\n for obj in args:\n if isinstance(obj, numbers.Number):\n obj = str(obj)\n if isinstance(obj, basestring):\n obj = escape(obj)\n self.children.append(obj)\n elif isinstance(obj, dom_tag):\n ctx = dom_tag._with_contexts[_get_thread_context()]\n if ctx and ctx[-1]:\n ctx[-1].used.add(obj)\n self.children.append(obj)\n obj.parent = self\n obj.setdocument(self.document)\n elif isinstance(obj, dict):\n for attr, value in obj.items():\n self.set_attribute(*dom_tag.clean_pair(attr, value))\n elif hasattr(obj, '__iter__'):\n for subobj in obj:\n self.add(subobj)\n else:\n raise ValueError('%r not a tag or string.' % obj)\n if len(args) == 1:\n return args[0]\n return args", - "docstring": "Add new child tags." - }, - { - "code": "def ssh_config(self, name=''):\n r = self.local_renderer\n with self.settings(hide('running')):\n output = r.local('vagrant ssh-config %s' % name, capture=True)\n config = {}\n for line in output.splitlines()[1:]:\n key, value = line.strip().split(' ', 2)\n config[key] = value\n return config", - "docstring": "Get the SSH parameters for connecting to a vagrant VM." - }, - { - "code": "def breakfast(self, message=\"Breakfast is ready\", shout: bool = False):\n return self.helper.output(message, shout)", - "docstring": "Say something in the morning" - }, - { - "code": "def write_config(config, app_dir, filename='configuration.json'):\n path = os.path.join(app_dir, filename)\n with open(path, 'w') as f:\n json.dump(\n config, f, indent=4, cls=DetectMissingEncoder,\n separators=(',', ': '))", - "docstring": "Write configuration to the applicaiton directory." - }, - { - "code": "def create_logger(name,\n filename=None,\n logging_level=logging.DEBUG):\n logger = logging.getLogger(name)\n formatter = logging.Formatter(('%(asctime)s - %(name)s - '\n '%(levelname)-8s - %(message)s'))\n if filename:\n fh = logging.FileHandler(filename=filename)\n fh.setFormatter(formatter)\n logger.addHandler(fh)\n ch = logging.StreamHandler()\n ch.setFormatter(formatter)\n logger.addHandler(ch)\n logger.setLevel(logging_level)\n return logger", - "docstring": "Create a logger object." - }, - { - "code": "def generate_sphere(radius):\n rint = np.ceil(radius).astype('int')\n t = np.arange(-rint, rint+1, 1)\n x,y,z = np.meshgrid(t, t, t, indexing='ij')\n r = np.sqrt(x*x + y*y + z*z)\n sphere = r < radius\n return sphere", - "docstring": "Generates a centered boolean mask of a 3D sphere" - }, - { - "code": "def fetch(self):\n from ..iq import Iq\n jid,node = self.address\n iq = Iq(to_jid = jid, stanza_type = \"get\")\n disco = self.disco_class(node)\n iq.add_content(disco.xmlnode)\n self.stream.set_response_handlers(iq,self.__response, self.__error,\n self.__timeout)\n self.stream.send(iq)", - "docstring": "Initialize the Service Discovery process." - }, - { - "code": "def indexImages(folder,fname=\"index.html\"):\n html=\"\"\n for item in glob.glob(folder+\"/*.*\"):\n if item.split(\".\")[-1] in ['jpg','png']:\n html+=\"

%s

\"%os.path.basename(item)\n html+=''%os.path.basename(item)\n html+='
'*10\n html+=\"\"\n f=open(folder+\"/\"+fname,'w')\n f.write(html)\n f.close\n print(\"indexed:\")\n print(\" \",os.path.abspath(folder+\"/\"+fname))\n return", - "docstring": "OBSOLETE WAY TO INDEX A FOLDER." - }, - { - "code": "def collect_links(self, env=None):\n for asset in self.assets.values():\n if asset.has_bundles():\n asset.collect_files()\n if env is None:\n env = self.config.env\n if env == static_bundle.ENV_PRODUCTION:\n self._minify(emulate=True)\n self._add_url_prefix()", - "docstring": "Return links without build files" - }, - { - "code": "def getParent2(abfFname,groups):\n if \".abf\" in abfFname:\n abfFname=os.path.basename(abfFname).replace(\".abf\",\"\")\n for parentID in groups.keys():\n if abfFname in groups[parentID]:\n return parentID\n return abfFname", - "docstring": "given an ABF and the groups dict, return the ID of its parent." - }, - { - "code": "def _assign_enterprise_role_to_users(self, _get_batch_method, options, is_feature_role=False):\n role_name = options['role']\n batch_limit = options['batch_limit']\n batch_sleep = options['batch_sleep']\n batch_offset = options['batch_offset']\n current_batch_index = batch_offset\n users_batch = _get_batch_method(\n batch_offset,\n batch_offset + batch_limit\n )\n role_class = SystemWideEnterpriseRole\n role_assignment_class = SystemWideEnterpriseUserRoleAssignment\n if is_feature_role:\n role_class = EnterpriseFeatureRole\n role_assignment_class = EnterpriseFeatureUserRoleAssignment\n enterprise_role = role_class.objects.get(name=role_name)\n while users_batch.count() > 0:\n for index, user in enumerate(users_batch):\n LOGGER.info(\n 'Processing user with index %s and id %s',\n current_batch_index + index, user.id\n )\n role_assignment_class.objects.get_or_create(\n user=user,\n role=enterprise_role\n )\n sleep(batch_sleep)\n current_batch_index += len(users_batch)\n users_batch = _get_batch_method(\n current_batch_index,\n current_batch_index + batch_limit\n )", - "docstring": "Assigns enterprise role to users." - }, - { - "code": "def _escape_str_id(id_str):\n for c in (\"'\", '\"'):\n if id_str.startswith(c) and id_str.endswith(c) \\\n and id_str.count(c) == 2:\n id_str = id_str.strip(c)\n for char, escaped_char in _renames:\n id_str = id_str.replace(char, escaped_char)\n return id_str", - "docstring": "make a single string id SBML compliant" - }, - { - "code": "def create_template(\n self,\n name,\n subject,\n html,\n text='',\n timeout=None\n ):\n payload = {\n 'name': name,\n 'subject': subject,\n 'html': html,\n 'text': text\n }\n return self._api_request(\n self.TEMPLATES_ENDPOINT,\n self.HTTP_POST,\n payload=payload,\n timeout=timeout\n )", - "docstring": "API call to create a template" - }, - { - "code": "async def manage(self):\n cm = _ContextManager(self.database)\n if isinstance(self.database.obj, AIODatabase):\n cm.connection = await self.database.async_connect()\n else:\n cm.connection = self.database.connect()\n return cm", - "docstring": "Manage a database connection." 
- }, - { - "code": "def do_windowed(self, line):\n self.bot.canvas.sink.trigger_fullscreen_action(False)\n print(self.response_prompt, file=self.stdout)", - "docstring": "Un-fullscreen the current window" - }, - { - "code": "def count_citation_years(graph: BELGraph) -> typing.Counter[int]:\n result = defaultdict(set)\n for _, _, data in graph.edges(data=True):\n if CITATION not in data or CITATION_DATE not in data[CITATION]:\n continue\n try:\n dt = _ensure_datetime(data[CITATION][CITATION_DATE])\n result[dt.year].add((data[CITATION][CITATION_TYPE], data[CITATION][CITATION_REFERENCE]))\n except Exception:\n continue\n return count_dict_values(result)", - "docstring": "Count the number of citations from each year." - }, - { - "code": "def _log_prior_transit(theta, priorbounds):\n allowed = True\n for ix, key in enumerate(np.sort(list(priorbounds.keys()))):\n if priorbounds[key][0] < theta[ix] < priorbounds[key][1]:\n allowed = True and allowed\n else:\n allowed = False\n if allowed:\n return 0.\n return -np.inf", - "docstring": "Assume priors on all parameters have uniform probability." - }, - { - "code": "def _select_block(str_in, start_tag, end_tag):\n start_pos = str_in.find(start_tag)\n if start_pos < 0:\n raise ValueError('start_tag not found')\n depth = 0\n for pos in range(start_pos, len(str_in)):\n if str_in[pos] == start_tag:\n depth += 1\n elif str_in[pos] == end_tag:\n depth -= 1\n if depth == 0:\n break\n sel = str_in[start_pos + 1:pos]\n return sel", - "docstring": "Select first block delimited by start_tag and end_tag" - }, - { - "code": "def cleanup_none(self):\n for (prop, default) in self.defaults.items():\n if getattr(self, prop) == '_None':\n setattr(self, prop, None)", - "docstring": "Removes the temporary value set for None attributes." - }, - { - "code": "def file_do(self, filename):\n log.info('Executing '+filename)\n res = self.__exchange('dofile(\"'+filename+'\")')\n log.info(res)\n return res", - "docstring": "Execute a file on the device using 'do" - }, - { - "code": "def sourceDirValidationError(dirname, component_name):\n if dirname == component_name:\n return 'Module %s public include directory %s should not contain source files' % (component_name, dirname)\n elif dirname.lower() in ('source', 'src') and dirname != 'source':\n return 'Module %s has non-standard source directory name: \"%s\" should be \"source\"' % (component_name, dirname)\n elif isPotentialTestDir(dirname) and dirname != 'test':\n return 'Module %s has non-standard test directory name: \"%s\" should be \"test\"' % (component_name, dirname)\n elif not Source_Dir_Regex.match(dirname):\n corrected = Source_Dir_Invalid_Regex.sub('', dirname.lower())\n if not corrected:\n corrected = 'source'\n return 'Module %s has non-standard source directory name: \"%s\" should be \"%s\"' % (component_name, dirname, corrected)\n else:\n return None", - "docstring": "validate source directory names in components" - }, - { - "code": "def styles(self):\n styles = get_all_styles()\n whitelist = self.app.config.get('CSL_STYLES_WHITELIST')\n if whitelist:\n return {k: v for k, v in styles.items() if k in whitelist}\n return styles", - "docstring": "Get a dictionary of CSL styles." 
- }, - { - "code": "def on_map_fragment_created(self, obj_id):\n self.fragment = MapFragment(__id__=obj_id)\n self.map.onMapReady.connect(self.on_map_ready)\n self.fragment.getMapAsync(self.map.getId())\n context = self.get_context()\n def on_transaction(id):\n trans = FragmentTransaction(__id__=id)\n trans.add(self.widget.getId(), self.fragment)\n trans.commit()\n def on_fragment_manager(id):\n fm = FragmentManager(__id__=id)\n fm.beginTransaction().then(on_transaction)\n context.widget.getSupportFragmentManager().then(on_fragment_manager)", - "docstring": "Create the fragment and pull the map reference when it's loaded." - }, - { - "code": "def _reassemble_binder(id, tree, metadata):\n binder = cnxepub.Binder(id, metadata=metadata)\n for item in tree['contents']:\n node = _node_to_model(item, parent=binder)\n if node.metadata['title'] != item['title']:\n binder.set_title_for_node(node, item['title'])\n return binder", - "docstring": "Reassemble a Binder object coming out of the database." - }, - { - "code": "def setup_components_and_tf_funcs(self, custom_getter=None):\n custom_getter = super(QDemoModel, self).setup_components_and_tf_funcs(custom_getter)\n self.demo_memory = Replay(\n states=self.states_spec,\n internals=self.internals_spec,\n actions=self.actions_spec,\n include_next_states=True,\n capacity=self.demo_memory_capacity,\n scope='demo-replay',\n summary_labels=self.summary_labels\n )\n self.fn_import_demo_experience = tf.make_template(\n name_='import-demo-experience',\n func_=self.tf_import_demo_experience,\n custom_getter_=custom_getter\n )\n self.fn_demo_loss = tf.make_template(\n name_='demo-loss',\n func_=self.tf_demo_loss,\n custom_getter_=custom_getter\n )\n self.fn_combined_loss = tf.make_template(\n name_='combined-loss',\n func_=self.tf_combined_loss,\n custom_getter_=custom_getter\n )\n self.fn_demo_optimization = tf.make_template(\n name_='demo-optimization',\n func_=self.tf_demo_optimization,\n custom_getter_=custom_getter\n )\n return custom_getter", - "docstring": "Constructs the extra Replay memory." 
- }, - { - "code": "def write(self, album, media_group):\n from sigal import __url__ as sigal_link\n file_path = os.path.join(album.dst_path, media_group[0].filename)\n page = self.template.render({\n 'album': album,\n 'media': media_group[0],\n 'previous_media': media_group[-1],\n 'next_media': media_group[1],\n 'index_title': self.index_title,\n 'settings': self.settings,\n 'sigal_link': sigal_link,\n 'theme': {'name': os.path.basename(self.theme),\n 'url': url_from_path(os.path.relpath(self.theme_path,\n album.dst_path))},\n })\n output_file = \"%s.html\" % file_path\n with open(output_file, 'w', encoding='utf-8') as f:\n f.write(page)", - "docstring": "Generate the media page and save it" - }, - { - "code": "def _get_stream_id(comp_name, stream_id):\n proto_stream_id = topology_pb2.StreamId()\n proto_stream_id.id = stream_id\n proto_stream_id.component_name = comp_name\n return proto_stream_id", - "docstring": "Returns a StreamId protobuf message" - }, - { - "code": "def process_response(self, resp, multiple_rates):\n self._check_for_exceptions(resp, multiple_rates)\n rates = {}\n for result in resp['results']:\n rate = ZipTaxClient._cast_tax_rate(result['taxSales'])\n rates[result['geoCity']] = rate\n if not multiple_rates:\n return rates[list(rates.keys())[0]]\n return rates", - "docstring": "Get the tax rate from the ZipTax response" - }, - { - "code": "def mask_circular_annular_from_shape_pixel_scale_and_radii(shape, pixel_scale, inner_radius_arcsec, outer_radius_arcsec,\n centre=(0.0, 0.0)):\n mask = np.full(shape, True)\n centres_arcsec = mask_centres_from_shape_pixel_scale_and_centre(shape=mask.shape, pixel_scale=pixel_scale, centre=centre)\n for y in range(mask.shape[0]):\n for x in range(mask.shape[1]):\n y_arcsec = (y - centres_arcsec[0]) * pixel_scale\n x_arcsec = (x - centres_arcsec[1]) * pixel_scale\n r_arcsec = np.sqrt(x_arcsec ** 2 + y_arcsec ** 2)\n if outer_radius_arcsec >= r_arcsec >= inner_radius_arcsec:\n mask[y, x] = False\n return mask", - "docstring": "Compute an annular masks from an input inner and outer masks radius and regular shape." - }, - { - "code": "def ci(ctx):\n opts = ['']\n if os.environ.get('TRAVIS', '').lower() == 'true':\n opts += ['test.pytest']\n else:\n opts += ['test.tox']\n ctx.run(\"invoke --echo --pty clean --all build --docs check --reports{}\".format(' '.join(opts)))", - "docstring": "Perform continuous integration tasks." - }, - { - "code": "def paginate_link_tag(item):\n a_tag = Page.default_link_tag(item)\n if item['type'] == 'current_page':\n return make_html_tag('li', a_tag, **{'class': 'blue white-text'})\n return make_html_tag('li', a_tag)", - "docstring": "Create an A-HREF tag that points to another page usable in paginate." - }, - { - "code": "def verify(self):\n value = self.get('verify', 'true')\n if isinstance(value, bool):\n verify = value\n elif value.lower() == 'true':\n verify = True\n elif value.lower() == 'false':\n verify = False\n else:\n verify = value\n return verify", - "docstring": "Verify ssl service certificate." - }, - { - "code": "def _get_output(self, a, image):\n sd = np.square(self._input_images - image)\n mses = np.mean(sd, axis=tuple(range(1, sd.ndim)))\n index = np.argmin(mses)\n if mses[index] > 0:\n raise ValueError('No precomputed output image for this image')\n return self._output_images[index]", - "docstring": "Looks up the precomputed adversarial image for a given image." 
- }, - { - "code": "def revoke_project_bid(session, bid_id):\n headers = {\n 'Content-Type': 'application/x-www-form-urlencoded'\n }\n bid_data = {\n 'action': 'revoke'\n }\n endpoint = 'bids/{}'.format(bid_id)\n response = make_put_request(session, endpoint, headers=headers,\n params_data=bid_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['status']\n else:\n json_data = response.json()\n raise BidNotRevokedException(message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Revoke a bid on a project" - }, - { - "code": "def _state_changed(self, state):\n logger.debug('Adapter state change: {0}'.format(state))\n if state == 5:\n self._powered_off.clear()\n self._powered_on.set()\n elif state == 4:\n self._powered_on.clear()\n self._powered_off.set()", - "docstring": "Called when the power state changes." - }, - { - "code": "def start_rpc_listeners(self):\n self._setup_rpc()\n if not self.endpoints:\n return []\n self.conn = n_rpc.create_connection()\n self.conn.create_consumer(self.topic, self.endpoints,\n fanout=False)\n return self.conn.consume_in_threads()", - "docstring": "Configure all listeners here" - }, - { - "code": "def parse(self, source):\n rt, title, title_pic, markdown = libparser.parse(source)\n if rt == -1:\n raise SeparatorNotFound\n elif rt == -2:\n raise PostTitleNotFound\n title, title_pic, markdown = map(to_unicode, (title, title_pic,\n markdown))\n html = self.markdown.render(markdown)\n summary = self.markdown.render(markdown[:200])\n return {\n 'title': title,\n 'markdown': markdown,\n 'html': html,\n 'summary': summary,\n 'title_pic': title_pic\n }", - "docstring": "Parse ascii post source, return dict" - }, - { - "code": "def with_tz(request):\n dt = datetime.now() \n t = Template('{% load tz %}{% localtime on %}{% get_current_timezone as TIME_ZONE %}{{ TIME_ZONE }}{% endlocaltime %}') \n c = RequestContext(request)\n response = t.render(c)\n return HttpResponse(response)", - "docstring": "Get the time with TZ enabled" - }, - { - "code": "def path_to_node(tree, path):\n if path is None:\n return None\n node = tree\n for key in path:\n node = child_by_key(node, key)\n return node", - "docstring": "FST node located at the given path" - }, - { - "code": "def glob_parts(prefix, ext):\n if ext.startswith('.'):\n ext = ext[1:]\n files = glob.glob(prefix+'.'+ext) + glob.glob(prefix+'.part[0-9][0-9][0-9][0-9].'+ext)\n files.sort()\n return files", - "docstring": "Find files from a continuation run" - }, - { - "code": "def extract(self):\n if self.parent:\n try:\n self.parent.contents.remove(self)\n except ValueError:\n pass\n lastChild = self._lastRecursiveChild()\n nextElement = lastChild.next\n if self.previous:\n self.previous.next = nextElement\n if nextElement:\n nextElement.previous = self.previous\n self.previous = None\n lastChild.next = None\n self.parent = None\n if self.previousSibling:\n self.previousSibling.nextSibling = self.nextSibling\n if self.nextSibling:\n self.nextSibling.previousSibling = self.previousSibling\n self.previousSibling = self.nextSibling = None\n return self", - "docstring": "Destructively rips this element out of the tree." 
- }, - { - "code": "def compact_name(self, hashsize=6):\n s = self.compact_name_core(hashsize, t_max=True)\n s += \"_ID%d-%d\" % (self.ID, self.EID)\n return s", - "docstring": "Compact representation of all simulation parameters" - }, - { - "code": "def dump(grids, mode=MODE_ZINC):\n if isinstance(grids, Grid):\n return dump_grid(grids, mode=mode)\n _dump = functools.partial(dump_grid, mode=mode)\n if mode == MODE_ZINC:\n return '\\n'.join(map(_dump, grids))\n elif mode == MODE_JSON:\n return '[%s]' % ','.join(map(_dump, grids))\n else:\n raise NotImplementedError('Format not implemented: %s' % mode)", - "docstring": "Dump the given grids in the specified over-the-wire format." - }, - { - "code": "def combinefiles(filepath):\n fastqs = glob.glob(filepath)\n firsts = [i for i in fastqs if \"_R1_\" in i]\n if not firsts:\n raise IPyradWarningExit(\"First read files names must contain '_R1_'.\")\n seconds = [ff.replace(\"_R1_\", \"_R2_\") for ff in firsts]\n return zip(firsts, seconds)", - "docstring": "Joins first and second read file names" - }, - { - "code": "def write(self, path):\n with open(path, \"wb\") as fout:\n fout.write(self.m_buf)", - "docstring": "Write buffer to file" - }, - { - "code": "def locate(pattern, root=os.curdir):\n for path, dummy, files in os.walk(os.path.abspath(root)):\n for filename in fnmatch.filter(files, pattern):\n yield os.path.join(path, filename)", - "docstring": "Locate all files matching supplied filename pattern recursively." - }, - { - "code": "def rssi(self, timeout_sec=TIMEOUT_SEC):\n self._rssi_read.clear()\n self._peripheral.readRSSI()\n if not self._rssi_read.wait(timeout_sec):\n raise RuntimeError('Exceeded timeout waiting for RSSI value!')\n return self._rssi", - "docstring": "Return the RSSI signal strength in decibels." - }, - { - "code": "def interleaves(self, info):\n return info.byte_offset == self.component_type.size * self.components", - "docstring": "Does the buffer interleave with this one?" - }, - { - "code": "def _storeSample(self, inputVector, trueCatIndex, partition=0):\n if self._samples is None:\n self._samples = numpy.zeros((0, len(inputVector)), dtype=RealNumpyDType)\n assert self._labels is None\n self._labels = []\n self._samples = numpy.concatenate((self._samples, numpy.atleast_2d(inputVector)), axis=0)\n self._labels += [trueCatIndex]\n if self._partitions is None:\n self._partitions = []\n if partition is None:\n partition = 0\n self._partitions += [partition]", - "docstring": "Store a training sample and associated category label" - }, - { - "code": "def write(self, album):\n page = self.template.render(**self.generate_context(album))\n output_file = os.path.join(album.dst_path, album.output_file)\n with open(output_file, 'w', encoding='utf-8') as f:\n f.write(page)", - "docstring": "Generate the HTML page and save it." 
- }, - { - "code": "def point(self):\n string = unhexlify(self.unCompressed())\n return ecdsa.VerifyingKey.from_string(\n string[1:], curve=ecdsa.SECP256k1\n ).pubkey.point", - "docstring": "Return the point for the public key" - }, - { - "code": "def run(command, parser, cl_args, unknown_args):\n Log.debug(\"Update Args: %s\", cl_args)\n extra_lib_jars = jars.packing_jars()\n action = \"update topology%s\" % (' in dry-run mode' if cl_args[\"dry_run\"] else '')\n dict_extra_args = {}\n try:\n dict_extra_args = build_extra_args_dict(cl_args)\n except Exception as err:\n return SimpleResult(Status.InvocationError, err.message)\n if cl_args['deploy_mode'] == config.SERVER_MODE:\n return cli_helper.run_server(command, cl_args, action, dict_extra_args)\n else:\n list_extra_args = convert_args_dict_to_list(dict_extra_args)\n return cli_helper.run_direct(command, cl_args, action, list_extra_args, extra_lib_jars)", - "docstring": "run the update command" - }, - { - "code": "def _list_itemstrs(list_, **kwargs):\n items = list(list_)\n kwargs['_return_info'] = True\n _tups = [repr2(item, **kwargs) for item in items]\n itemstrs = [t[0] for t in _tups]\n max_height = max([t[1]['max_height'] for t in _tups]) if _tups else 0\n _leaf_info = {\n 'max_height': max_height + 1,\n }\n sort = kwargs.get('sort', None)\n if sort is None:\n sort = isinstance(list_, (set, frozenset))\n if sort:\n itemstrs = _sort_itemstrs(items, itemstrs)\n return itemstrs, _leaf_info", - "docstring": "Create a string representation for each item in a list." - }, - { - "code": "def read_openke_translation(filename, delimiter='\\t', entity_first=True):\n result = {}\n with open(filename, \"r\") as f:\n _ = next(f)\n for line in f:\n line_slice = line.rstrip().split(delimiter)\n if not entity_first:\n line_slice = list(reversed(line_slice))\n result[line_slice[0]] = line_slice[1]\n return result", - "docstring": "Returns map with entity or relations from plain text." - }, - { - "code": "def replace_bases(self, old, new):\n self.seq = self.seq.replace(old, new)", - "docstring": "Replaces all occurrences of 'old' with 'new'" - }, - { - "code": "def str_to_list(s):\n if s is None:\n return []\n elif isinstance(s, (tuple, list)):\n return s\n elif not isinstance(s, six.string_types):\n raise NotImplementedError('Unknown type: %s' % type(s))\n return [_.strip().lower() for _ in (s or '').split(',') if _.strip()]", - "docstring": "Converts a string of comma delimited values and returns a list." - }, - { - "code": "def do_vars(self, line):\n if self.bot._vars:\n max_name_len = max([len(name) for name in self.bot._vars])\n for i, (name, v) in enumerate(self.bot._vars.items()):\n keep = i < len(self.bot._vars) - 1\n self.print_response(\"%s = %s\" % (name.ljust(max_name_len), v.value), keep=keep)\n else:\n self.print_response(\"No vars\")", - "docstring": "List bot variables and values" - }, - { - "code": "def dasherize(value):\n value = value.strip()\n value = re.sub(r'([A-Z])', r'-\\1', value)\n value = re.sub(r'[-_\\s]+', r'-', value)\n value = re.sub(r'^-', r'', value)\n value = value.lower()\n return value", - "docstring": "Dasherizes the passed value." - }, - { - "code": "def reduce_opacity(im, opacity):\n assert opacity >= 0 and opacity <= 1\n if im.mode != 'RGBA':\n im = im.convert('RGBA')\n else:\n im = im.copy()\n alpha = im.split()[3]\n alpha = ImageEnhance.Brightness(alpha).enhance(opacity)\n im.putalpha(alpha)\n return im", - "docstring": "Returns an image with reduced opacity." 
- }, - { - "code": "def deploy(self):\n for service in self.genv.services:\n service = service.strip().upper()\n funcs = common.service_deployers.get(service)\n if funcs:\n print('Deploying service %s...' % (service,))\n for func in funcs:\n if not self.dryrun:\n func()", - "docstring": "Applies routine, typically application-level changes to the service." - }, - { - "code": "def full(shape, value, dtype='f8'):\n shared = empty(shape, dtype)\n shared[:] = value\n return shared", - "docstring": "Create a shared memory array of given shape and type, filled with `value`." - }, - { - "code": "def to_table(metrics):\n all_queries = tracker_access.metric_queries()\n m = tracker_access.queries_map()\n names = metrics.values()[0].keys()\n stats = []\n for n in names:\n info = [n]\n for field in all_queries:\n try:\n info.append(str(metrics[field][n]))\n except KeyError:\n pass\n stats.append(info)\n header = ['container id'] + [m[k] for k in all_queries if k in metrics.keys()]\n return stats, header", - "docstring": "normalize raw metrics API result to table" - }, - { - "code": "def clear_display_buffer(self):\n for row in range(0, 8):\n self.firmata.i2c_write(0x70, row * 2, 0, 0)\n self.firmata.i2c_write(0x70, (row * 2) + 1, 0, 0)\n for column in range(0, 8):\n self.display_buffer[row][column] = 0", - "docstring": "Set all led's to off." - }, - { - "code": "def show_version(self):\n class ShowVersionAction(argparse.Action):\n def __init__(inner_self, nargs=0, **kw):\n super(ShowVersionAction, inner_self).__init__(nargs=nargs, **kw)\n def __call__(inner_self, parser, args, value, option_string=None):\n print(\"{parser_name} version: {version}\".format(\n parser_name=self.config.get(\n \"parser\", {}).get(\"prog\"),\n version=self.prog_version))\n return ShowVersionAction", - "docstring": "custom command line action to show version" - }, - { - "code": "def _add_parsley_ns(cls, namespace_dict):\n namespace_dict.update({\n 'parslepy' : cls.LOCAL_NAMESPACE,\n 'parsley' : cls.LOCAL_NAMESPACE,\n })\n return namespace_dict", - "docstring": "Extend XPath evaluation with Parsley extensions' namespace" - }, - { - "code": "def deserialize(cls, serializer, wf_spec, s_state, **kwargs):\n return serializer.deserialize_trigger(wf_spec,\n s_state,\n **kwargs)", - "docstring": "Deserializes the trigger using the provided serializer." - }, - { - "code": "def add_tags(self, tags):\n return self.get_data(\n \"firewalls/%s/tags\" % self.id,\n type=POST,\n params={\"tags\": tags}\n )", - "docstring": "Add tags to this Firewall." 
- }, - { - "code": "def make_random_contigs(contigs, length, outfile, name_by_letters=False, prefix='', seed=None, first_number=1):\n random.seed(a=seed)\n fout = utils.open_file_write(outfile)\n letters = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')\n letters_index = 0\n for i in range(contigs):\n if name_by_letters:\n name = letters[letters_index]\n letters_index += 1\n if letters_index == len(letters):\n letters_index = 0\n else:\n name = str(i + first_number)\n fa = sequences.Fasta(prefix + name, ''.join([random.choice('ACGT') for x in range(length)]))\n print(fa, file=fout)\n utils.close(fout)", - "docstring": "Makes a multi fasta file of random sequences, all the same length" - }, - { - "code": "def iterscan(self, string, idx=0, context=None):\n match = self.scanner.scanner(string, idx).match\n actions = self.actions\n lastend = idx\n end = len(string)\n while True:\n m = match()\n if m is None:\n break\n matchbegin, matchend = m.span()\n if lastend == matchend:\n break\n action = actions[m.lastindex]\n if action is not None:\n rval, next_pos = action(m, context)\n if next_pos is not None and next_pos != matchend:\n matchend = next_pos\n match = self.scanner.scanner(string, matchend).match\n yield rval, matchend\n lastend = matchend", - "docstring": "Yield match, end_idx for each match" - }, - { - "code": "def __intermediate_proto(self, interface, address):\n address_proto = address.pop('proto', 'static')\n if 'proto' not in interface:\n return address_proto\n else:\n return interface.pop('proto')", - "docstring": "determines UCI interface \"proto\" option" - }, - { - "code": "def MLOAD(self, address):\n self._allocate(address, 32)\n value = self._load(address, 32)\n return value", - "docstring": "Load word from memory" - }, - { - "code": "def chunk_clusters(data, sample):\n num = 0\n optim = int((sample.stats.clusters_total // data.cpus) + \\\n (sample.stats.clusters_total % data.cpus))\n chunkslist = []\n with gzip.open(sample.files.clusters, 'rb') as clusters:\n pairdealer = itertools.izip(*[iter(clusters)]*2)\n done = 0\n while not done:\n done, chunk = clustdealer(pairdealer, optim)\n chunkhandle = os.path.join(data.dirs.clusts,\n \"tmp_\"+str(sample.name)+\".\"+str(num*optim))\n if chunk:\n chunkslist.append((optim, chunkhandle))\n with open(chunkhandle, 'wb') as outchunk:\n outchunk.write(\"//\\n//\\n\".join(chunk)+\"//\\n//\\n\")\n num += 1\n return chunkslist", - "docstring": "split job into bits and pass to the client" - }, - { - "code": "def _validate_label(cls, name, value):\n cls._check_label_name(name)\n cls._check_label_value(value)\n if not cls._allow_reserved_keys and name in RESERVED_LABELS:\n raise ValueError('Label flag (%s=...) must not use reserved keys: %r' %\n (name, list(RESERVED_LABELS)))", - "docstring": "Raise ValueError if the label is invalid." - }, - { - "code": "def _status(self):\n job_id_list = ' '.join(self.resources.keys())\n cmd = \"condor_q {0} -af:jr JobStatus\".format(job_id_list)\n retcode, stdout, stderr = super().execute_wait(cmd)\n for line in stdout.strip().split('\\n'):\n parts = line.split()\n job_id = parts[0]\n status = translate_table.get(parts[1], 'UNKNOWN')\n self.resources[job_id]['status'] = status", - "docstring": "Update the resource dictionary with job statuses." 
- }, - { - "code": "def delete(self, request, response):\n if self.slug is None:\n raise http.exceptions.NotImplemented()\n self.assert_operations('destroy')\n self.destroy()\n self.response.status = http.client.NO_CONTENT\n self.make_response()", - "docstring": "Processes a `DELETE` request." - }, - { - "code": "def _create_pattern_set(self, pattern, values):\n type_ = self._get_type(values)\n version = self._get_version(values)\n comment = values.get(COMMENT)\n self._pattern_set = self._spec.new_pattern_set(\n type_, version, pattern, self, comment\n )", - "docstring": "Create a new pattern set." - }, - { - "code": "def _update_dict(data, default_data, replace_data=False):\n if not data:\n data = default_data.copy()\n return data\n if not isinstance(data, dict):\n raise TypeError('Value not dict type')\n if len(data) > 255:\n raise ValueError('More than 255 values defined')\n for i in data.keys():\n if not isinstance(i, int):\n raise TypeError('Index not int type')\n if i < 0 or i > 255:\n raise ValueError('Index value out of range')\n if not replace_data:\n data.update(default_data)\n return data", - "docstring": "Update algorithm definition type dictionaries" - }, - { - "code": "def pre_deploy(self):\n for service in self.genv.services:\n service = service.strip().upper()\n funcs = common.service_pre_deployers.get(service)\n if funcs:\n print('Running pre-deployments for service %s...' % (service,))\n for func in funcs:\n func()", - "docstring": "Runs methods services have requested be run before each deployment." - }, - { - "code": "def _request(self, failure, endpoints, *args, **kwargs):\n if not endpoints:\n return failure\n endpoint = endpoints.pop(0)\n d = super(MarathonClient, self).request(*args, url=endpoint, **kwargs)\n d.addErrback(self._request, endpoints, *args, **kwargs)\n return d", - "docstring": "Recursively make requests to each endpoint in ``endpoints``." - }, - { - "code": "def site_path(self):\n if platform == 'win':\n return unipath(self.path, 'Lib', 'site-packages')\n py_ver = 'python{0}'.format(sys.version[:3])\n return unipath(self.path, 'lib', py_ver, 'site-packages')", - "docstring": "Path to environments site-packages" - }, - { - "code": "def redirect(cls, request, response):\n if cls.meta.legacy_redirect:\n if request.method in ('GET', 'HEAD',):\n response.status = http.client.MOVED_PERMANENTLY\n else:\n response.status = http.client.TEMPORARY_REDIRECT\n else:\n response.status = http.client.PERMANENT_REDIRECT\n response.close()", - "docstring": "Redirect to the canonical URI for this resource." 
- }, - { - "code": "def run(self):\n if not self.device:\n return\n try:\n data = \"\"\n while (self.do_run):\n try:\n if (self.device.inWaiting() > 1):\n l = self.device.readline()[:-2]\n l = l.decode(\"UTF-8\")\n if (l == \"[\"):\n data = \"[\"\n elif (l == \"]\") and (len(data) > 4) and (data[0] == \"[\"):\n data = data + \"]\"\n self.store.register_json(data)\n self.age()\n elif (l[0:3] == \" {\"):\n data = data + \" \" + l\n else:\n sleep(1)\n self.age()\n except (UnicodeDecodeError, ValueError):\n data = \"\"\n self.age()\n except serial.serialutil.SerialException:\n print(\"Could not connect to the serial line at \" + self.device_name)", - "docstring": "Open a connection over the serial line and receive data lines" - }, - { - "code": "def collect_keyword (sent, ranks, stopwords):\n for w in sent:\n if (w.word_id > 0) and (w.root in ranks) and (w.pos[0] in \"NV\") and (w.root not in stopwords):\n rl = RankedLexeme(text=w.raw.lower(), rank=ranks[w.root]/2.0, ids=[w.word_id], pos=w.pos.lower(), count=1)\n if DEBUG:\n print(rl)\n yield rl", - "docstring": "iterator for collecting the single-word keyphrases" - }, - { - "code": "def compile_glob(spec):\n parsed = \"\".join(parse_glob(spec))\n regex = \"^{0}$\".format(parsed)\n return re.compile(regex)", - "docstring": "Convert the given glob `spec` to a compiled regex." - }, - { - "code": "def min_conflicts(csp, max_steps=100000):\n csp.current = current = {}\n for var in csp.vars:\n val = min_conflicts_value(csp, var, current)\n csp.assign(var, val, current)\n for i in range(max_steps):\n conflicted = csp.conflicted_vars(current)\n if not conflicted:\n return current\n var = random.choice(conflicted)\n val = min_conflicts_value(csp, var, current)\n csp.assign(var, val, current)\n return None", - "docstring": "Solve a CSP by stochastic hillclimbing on the number of conflicts." - }, - { - "code": "def _canonicalize(self, filename):\n path, ext = os.path.splitext(filename)\n if not ext:\n ext = \".collection\"\n return path + ext", - "docstring": "Use .collection as extension unless provided" - }, - { - "code": "def wait_for_region_to_load(self):\n self.wait.until(lambda _: self.loaded)\n self.pm.hook.pypom_after_wait_for_region_to_load(region=self)\n return self", - "docstring": "Wait for the page region to load." - }, - { - "code": "def add_active_module(module):\n modules = set(get_active_modules())\n modules.add(module)\n new_modules_path = os.pathsep.join([m.path for m in modules])\n os.environ['CPENV_ACTIVE_MODULES'] = str(new_modules_path)", - "docstring": "Add a module to CPENV_ACTIVE_MODULES environment variable" - }, - { - "code": "def sha1(self):\n with open(self.path, 'rb') as f:\n return hashlib.sha1(f.read()).hexdigest()", - "docstring": "SHA1 hash of the config file itself." 
- }, - { - "code": "def _engineServicesRunning():\n process = subprocess.Popen([\"ps\", \"aux\"], stdout=subprocess.PIPE)\n stdout = process.communicate()[0]\n result = process.returncode\n if result != 0:\n raise RuntimeError(\"Unable to check for running client job manager\")\n running = False\n for line in stdout.split(\"\\n\"):\n if \"python\" in line and \"clientjobmanager.client_job_manager\" in line:\n running = True\n break\n return running", - "docstring": "Return true if the engine services are running" - }, - { - "code": "def alignment(self, d=5):\n vx = vy = vz = 0\n for b in self.boids:\n if b != self:\n vx, vy, vz = vx+b.vx, vy+b.vy, vz+b.vz\n n = len(self.boids)-1\n vx, vy, vz = vx/n, vy/n, vz/n\n return (vx-self.vx)/d, (vy-self.vy)/d, (vz-self.vz)/d", - "docstring": "Boids match velocity with other boids." - }, - { - "code": "def sanitize_filename(filename):\n token = generate_drop_id()\n name, extension = splitext(filename)\n if extension:\n return '%s%s' % (token, extension)\n else:\n return token", - "docstring": "preserve the file ending, but replace the name with a random token" - }, - { - "code": "def create_socket_options():\n sys_config = system_config.get_sys_config()\n opt_list = [const.INSTANCE_NETWORK_WRITE_BATCH_SIZE_BYTES,\n const.INSTANCE_NETWORK_WRITE_BATCH_TIME_MS,\n const.INSTANCE_NETWORK_READ_BATCH_SIZE_BYTES,\n const.INSTANCE_NETWORK_READ_BATCH_TIME_MS,\n const.INSTANCE_NETWORK_OPTIONS_SOCKET_RECEIVED_BUFFER_SIZE_BYTES,\n const.INSTANCE_NETWORK_OPTIONS_SOCKET_SEND_BUFFER_SIZE_BYTES]\n Log.debug(\"In create_socket_options()\")\n try:\n value_lst = [int(sys_config[opt]) for opt in opt_list]\n sock_opt = SocketOptions(*value_lst)\n return sock_opt\n except ValueError as e:\n raise ValueError(\"Invalid value in sys_config: %s\" % str(e))\n except KeyError as e:\n raise KeyError(\"Incomplete sys_config: %s\" % str(e))", - "docstring": "Creates SocketOptions object from a given sys_config dict" - }, - { - "code": "def matches(self, pattern):\n if not isinstance(self.val, str_types):\n raise TypeError('val is not a string')\n if not isinstance(pattern, str_types):\n raise TypeError('given pattern arg must be a string')\n if len(pattern) == 0:\n raise ValueError('given pattern arg must not be empty')\n if re.search(pattern, self.val) is None:\n self._err('Expected <%s> to match pattern <%s>, but did not.' % (self.val, pattern))\n return self", - "docstring": "Asserts that val is string and matches regex pattern." - }, - { - "code": "async def get_tracks(self, query):\r\n log.debug('Requesting tracks for query {}'.format(query))\r\n async with self.http.get(self.rest_uri + quote(query), headers={'Authorization': self.password}) as res:\r\n return await res.json(content_type=None)", - "docstring": "Returns a Dictionary containing search results for a given query." - }, - { - "code": "async def _load(self):\n try:\n conv_events = await self._conversation.get_events(\n self._conversation.events[0].id_\n )\n except (IndexError, hangups.NetworkError):\n conv_events = []\n if not conv_events:\n self._first_loaded = True\n if self._focus_position == self.POSITION_LOADING and conv_events:\n self.set_focus(conv_events[-1].id_)\n else:\n self._modified()\n self._refresh_watermarked_events()\n self._is_loading = False", - "docstring": "Load more events for this conversation." 
- }, - { - "code": "def time(self, t):\n _time = arrow.get(t).format('YYYY-MM-DDTHH:mm:ss')\n self._time = datetime.datetime.strptime(_time, '%Y-%m-%dT%H:%M:%S')", - "docstring": "Convert any timestamp into a datetime and save as _time" - }, - { - "code": "def _getJson(url, token='', version=''):\n if token:\n return _getJsonIEXCloud(url, token, version)\n return _getJsonOrig(url)", - "docstring": "for backwards compat, accepting token and version but ignoring" - }, - { - "code": "def nmse(a, b):\n return np.square(a - b).mean() / (a.mean() * b.mean())", - "docstring": "Returns the normalized mean square error of a and b" - }, - { - "code": "def destroy(self):\n marker = self.marker\n parent = self.parent()\n if marker:\n if parent:\n del parent.markers[marker.__id__]\n marker.remove()\n super(AndroidMapItemBase, self).destroy()", - "docstring": "Remove the marker if it was added to the map when destroying" - }, - { - "code": "def transform_courserun_title(self, content_metadata_item):\n title = content_metadata_item.get('title') or ''\n course_run_start = content_metadata_item.get('start')\n if course_run_start:\n if course_available_for_enrollment(content_metadata_item):\n title += ' ({starts}: {:%B %Y})'.format(\n parse_lms_api_datetime(course_run_start),\n starts=_('Starts')\n )\n else:\n title += ' ({:%B %Y} - {enrollment_closed})'.format(\n parse_lms_api_datetime(course_run_start),\n enrollment_closed=_('Enrollment Closed')\n )\n title_with_locales = []\n content_metadata_language_code = transform_language_code(content_metadata_item.get('content_language', ''))\n for locale in self.enterprise_configuration.get_locales(default_locale=content_metadata_language_code):\n title_with_locales.append({\n 'locale': locale,\n 'value': title\n })\n return title_with_locales", - "docstring": "Return the title of the courserun content item." - }, - { - "code": "async def choose_qtm_instance(interface):\n instances = {}\n print(\"Available QTM instances:\")\n async for i, qtm_instance in AsyncEnumerate(qtm.Discover(interface), start=1):\n instances[i] = qtm_instance\n print(\"{} - {}\".format(i, qtm_instance.info))\n try:\n choice = int(input(\"Connect to: \"))\n if choice not in instances:\n raise ValueError\n except ValueError:\n LOG.error(\"Invalid choice\")\n return None\n return instances[choice].host", - "docstring": "List running QTM instances, asks for input and return chosen QTM" - }, - { - "code": "def visualize(self):\n if os.path.isfile(self.workspace):\n t = threading.Thread(target=self.highlight_from_file,\n args=(self.workspace,))\n elif os.path.isdir(self.workspace):\n t = threading.Thread(target=self.highlight_from_dir,\n args=(self.workspace,))\n t.start()", - "docstring": "Given a Manticore workspace, or trace file, highlight the basic blocks." - }, - { - "code": "def format_seconds(self, n_seconds):\n func = self.ok\n if n_seconds >= 60:\n n_minutes, n_seconds = divmod(n_seconds, 60)\n return \"%s minutes %s seconds\" % (\n func(\"%d\" % n_minutes),\n func(\"%.3f\" % n_seconds))\n else:\n return \"%s seconds\" % (\n func(\"%.3f\" % n_seconds))", - "docstring": "Format a time in seconds." - }, - { - "code": "def __pre_delete_receiver(self, instance, **kwargs):\n logger.debug('RECEIVE pre_delete FOR %s', instance.__class__)\n self.delete_record(instance)", - "docstring": "Signal handler for when a registered model has been deleted." 
- }, - { - "code": "def _runpf_worker(task):\n (lcfile, outdir, timecols, magcols, errcols, lcformat, lcformatdir,\n pfmethods, pfkwargs, getblssnr, sigclip, nworkers, minobservations,\n excludeprocessed) = task\n if os.path.exists(lcfile):\n pfresult = runpf(lcfile,\n outdir,\n timecols=timecols,\n magcols=magcols,\n errcols=errcols,\n lcformat=lcformat,\n lcformatdir=lcformatdir,\n pfmethods=pfmethods,\n pfkwargs=pfkwargs,\n getblssnr=getblssnr,\n sigclip=sigclip,\n nworkers=nworkers,\n minobservations=minobservations,\n excludeprocessed=excludeprocessed)\n return pfresult\n else:\n LOGERROR('LC does not exist for requested file %s' % lcfile)\n return None", - "docstring": "This runs the runpf function." - }, - { - "code": "def position_rates(self):\n return [self.ode_obj.getPositionRate(i) for i in range(self.LDOF)]", - "docstring": "List of position rates for linear degrees of freedom." - }, - { - "code": "def remove_node(self, node):\n self.nodes.remove(node)\n for x in xrange(self.replicas):\n ring_key = self.hash_method(b(\"%s:%d\" % (node, x)))\n self.ring.pop(ring_key)\n self.sorted_keys.remove(ring_key)", - "docstring": "Removes `node` from the hash ring and its replicas." - }, - { - "code": "def _path(self, path):\n mode, encoding = self._mode_and_encoding_for_open()\n with open(path, mode, encoding=encoding) as file:\n self.__dump_to_file(file)", - "docstring": "Saves the dump in a file named `path`." - }, - { - "code": "def require(name, field, data_type):\n if not isinstance(field, data_type):\n msg = '{0} must have {1}, got: {2}'.format(name, data_type, field)\n raise AssertionError(msg)", - "docstring": "Require that the named `field` has the right `data_type`" - }, - { - "code": "def _expand_consumed_mesh(self, mesh, mesh_index, row_position, passed):\n if not mesh.is_produced():\n return\n row = mesh.producing_row\n position = Point(\n row_position.x + mesh.index_in_producing_row - mesh_index,\n row_position.y - INSTRUCTION_HEIGHT\n )\n self._expand(row, position, passed)", - "docstring": "expand the consumed meshes" - }, - { - "code": "def showfig(fig, aspect=\"auto\"):\n ax = fig.gca()\n alim = list(ax.axis())\n if alim[3] < alim[2]:\n temp = alim[2]\n alim[2] = alim[3]\n alim[3] = temp\n ax.axis(alim)\n ax.set_aspect(aspect)\n fig.show()", - "docstring": "Shows a figure with a typical orientation so that x and y axes are set up as expected." - }, - { - "code": "def availableVersions(self):\n r = []\n for t in self._getTags():\n logger.debug(\"available version tag: %s\", t)\n if not len(t[0].strip()):\n continue\n try:\n r.append(GithubComponentVersion(t[0], t[0], url=t[1], name=self.name, cache_key=None))\n except ValueError:\n logger.debug('invalid version tag: %s', t)\n return r", - "docstring": "return a list of Version objects, each with a tarball URL set" - }, - { - "code": "def _generate_refresh_request_body(self):\n body = urllib.parse.urlencode({\n 'grant_type': 'refresh_token',\n 'client_id': self.client_id,\n 'client_secret': self.client_secret,\n 'refresh_token': self.refresh_token,\n })\n return body", - "docstring": "Generate the body that will be used in the refresh request." 
- }, - { - "code": "def convert_args(args, kwargs):\n found = False\n for arg in args:\n if isinstance(arg, Cells):\n found = True\n break\n if found:\n args = tuple(\n arg.value if isinstance(arg, Cells) else arg for arg in args\n )\n if kwargs is not None:\n for key, arg in kwargs.items():\n if isinstance(arg, Cells):\n kwargs[key] = arg.value\n return args, kwargs", - "docstring": "If args and kwargs contains Cells, Convert them to their values." - }, - { - "code": "def handle_reduce(self, reduce_function_names, mapped_docs):\n reduce_functions = []\n for reduce_function_name in reduce_function_names:\n try:\n reduce_function = get_function(reduce_function_name)\n if getattr(reduce_function, 'view_decorated', None):\n reduce_function = reduce_function(self.log)\n reduce_functions.append(reduce_function)\n except Exception, exc:\n self.log(repr(exc))\n reduce_functions.append(lambda *args, **kwargs: None)\n keys, values = zip(\n (key, value) for ((key, doc_id), value) in mapped_docs)\n results = []\n for reduce_function in reduce_functions:\n try:\n results.append(reduce_function(keys, values, rereduce=False))\n except Exception, exc:\n self.log(repr(exc))\n results.append(None)\n return [True, results]", - "docstring": "Reduce several mapped documents by several reduction functions." - }, - { - "code": "def random_product(iter1, iter2):\n iter4 = np.concatenate([\n np.random.choice(iter1, 2, replace=False),\n np.random.choice(iter2, 2, replace=False)\n ])\n return iter4", - "docstring": "Random sampler for equal_splits functions" - }, - { - "code": "def check_clean_status(git_path=None):\n output = get_status(git_path)\n is_unmodified = (len(output.strip()) == 0)\n return is_unmodified", - "docstring": "Returns whether there are uncommitted changes in the working dir." - }, - { - "code": "def retract(self, sentence):\n \"Remove the sentence's clauses from the KB.\"\n for c in conjuncts(to_cnf(sentence)):\n if c in self.clauses:\n self.clauses.remove(c)", - "docstring": "Remove the sentence's clauses from the KB." - }, - { - "code": "def Watson(T, Hvap_ref, T_Ref, Tc, exponent=0.38):\n Tr = T/Tc\n Trefr = T_Ref/Tc\n H2 = Hvap_ref*((1-Tr)/(1-Trefr))**exponent\n return H2", - "docstring": "Adjusts enthalpy of vaporization of enthalpy for another temperature, for one temperature." - }, - { - "code": "def check_max_filesize(chosen_file, max_size):\n if os.path.getsize(chosen_file) > max_size:\n return False\n else:\n return True", - "docstring": "Checks file sizes for host" - }, - { - "code": "def itertable(table):\n for item in table:\n res = {\n k.lower(): nfd(v) if isinstance(v, text_type) else v for k, v in item.items()}\n for extra in res.pop('extra', []):\n k, _, v = extra.partition(':')\n res[k.strip()] = v.strip()\n yield res", - "docstring": "Auxiliary function for iterating over a data table." - }, - { - "code": "def refresh(self):\n args = [(obj.name, obj.value) for obj in self.queryset.all()]\n super(SettingDict, self).update(args)\n self.empty_cache = False", - "docstring": "Updates the cache with setting values from the database." 
- }, - { - "code": "def check_environment(target, label):\n if not git.exists():\n click.secho('You must have git installed to use yld.', fg='red')\n sys.exit(1)\n if not os.path.isdir('.git'):\n click.secho('You must cd into a git repository to use yld.', fg='red')\n sys.exit(1)\n if not git.is_committed():\n click.secho('You must commit or stash your work before proceeding.',\n fg='red')\n sys.exit(1)\n if target is None and label is None:\n click.secho('You must specify either a target or a label.', fg='red')\n sys.exit(1)", - "docstring": "Performs some environment checks prior to the program's execution" - }, - { - "code": "def main():\n search = ServiceSearch()\n services = search.get_services(up=True, tags=['!header_scan'])\n print_notification(\"Scanning {} services\".format(len(services)))\n urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)\n pool = Pool(100)\n count = 0\n for service in services:\n count += 1\n if count % 50 == 0:\n print_notification(\"Checking {}/{} services\".format(count, len(services)))\n pool.spawn(check_service, service)\n pool.join()\n print_notification(\"Completed, 'http' tag added to services that respond to http, 'https' tag added to services that respond to https.\")", - "docstring": "Retrieves services starts check_service in a gevent pool of 100." - }, - { - "code": "def cancel_scheduled_hangup(self, call_params):\n path = '/' + self.api_version + '/CancelScheduledHangup/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Cancel a Scheduled Hangup Helper" - }, - { - "code": "def add_at(self, index: int, requester: int, track: dict):\r\n self.queue.insert(min(index, len(self.queue) - 1), AudioTrack().build(track, requester))", - "docstring": "Adds a track at a specific index in the queue." - }, - { - "code": "def curl(self, url, post):\n try:\n req = urllib2.Request(url)\n req.add_header(\"Content-type\", \"application/xml\")\n data = urllib2.urlopen(req, post.encode('utf-8')).read()\n except urllib2.URLError, v:\n raise AmbientSMSError(v)\n return dictFromXml(data)", - "docstring": "Inteface for sending web requests to the AmbientSMS API Server" - }, - { - "code": "def format(self, record):\n if isinstance(self.fmt, dict):\n self._fmt = self.fmt[record.levelname]\n if sys.version_info > (3, 2):\n if self.style not in logging._STYLES:\n raise ValueError('Style must be one of: %s' % ','.join(\n list(logging._STYLES.keys())))\n self._style = logging._STYLES[self.style][0](self._fmt)\n if sys.version_info > (2, 7):\n message = super(LevelFormatter, self).format(record)\n else:\n message = ColoredFormatter.format(self, record)\n return message", - "docstring": "Customize the message format based on the log level." - }, - { - "code": "def getpath(self, section, option):\n return os.path.expanduser(os.path.expandvars(self.get(section, option)))", - "docstring": "Return option as an expanded path." - }, - { - "code": "def PlugIn(self):\n ids = self.available_ids()\n if len(ids) == 0:\n raise MaxInputsReachedError('Max Inputs Reached')\n self.id = ids[0]\n _xinput.PlugIn(self.id)\n while self.id in self.available_ids():\n pass", - "docstring": "Take next available controller id and plug in to Virtual USB Bus" - }, - { - "code": "def find_sections(lines):\n sections = []\n for line in lines:\n if is_heading(line):\n sections.append(get_heading(line))\n return sections", - "docstring": "Find all section names and return a list with their names." 
- }, - { - "code": "def loadGrammar(self, grammar, searchpaths=None):\r\n self.grammar = self._load(grammar, searchpaths=searchpaths)\r\n self.refs = {}\r\n for ref in self.grammar.getElementsByTagName(\"ref\"):\r\n self.refs[ref.attributes[\"id\"].value] = ref", - "docstring": "load context-free grammar" - }, - { - "code": "def parse_uri(self, raw_uri, recursive):\n if recursive:\n raw_uri = directory_fmt(raw_uri)\n file_provider = self.parse_file_provider(raw_uri)\n self._validate_paths_or_fail(raw_uri, recursive)\n uri, docker_uri = self.rewrite_uris(raw_uri, file_provider)\n uri_parts = job_model.UriParts(\n directory_fmt(os.path.dirname(uri)), os.path.basename(uri))\n return docker_uri, uri_parts, file_provider", - "docstring": "Return a valid docker_path, uri, and file provider from a flag value." - }, - { - "code": "def run_3to2(args=None):\n args = BASE_ARGS_3TO2 if args is None else BASE_ARGS_3TO2 + args\n try:\n proc = subprocess.Popen(['3to2'] + args, stderr=subprocess.PIPE)\n except OSError:\n for path in glob.glob('*.egg'):\n if os.path.isdir(path) and path not in sys.path:\n sys.path.append(path)\n try:\n from lib3to2.main import main as lib3to2_main\n except ImportError:\n raise OSError('3to2 script is unavailable.')\n else:\n if lib3to2_main('lib3to2.fixes', args):\n raise Exception('lib3to2 parsing error')\n else:\n num_errors = 0\n while proc.poll() is None:\n line = proc.stderr.readline()\n sys.stderr.write(line)\n num_errors += line.count(': ParseError: ')\n if proc.returncode or num_errors:\n raise Exception('lib3to2 parsing error')", - "docstring": "Convert Python files using lib3to2." - }, - { - "code": "def snoise2d(size, z=0.0, scale=0.05, octaves=1, persistence=0.25, lacunarity=2.0):\n import noise\n data = np.empty(size, dtype='float32')\n for y in range(size[0]):\n for x in range(size[1]):\n v = noise.snoise3(x * scale, y * scale, z,\n octaves=octaves, persistence=persistence, lacunarity=lacunarity)\n data[x, y] = v\n data = data * 0.5 + 0.5\n if __debug__:\n assert data.min() >= 0. and data.max() <= 1.0\n return data", - "docstring": "z value as like a seed" - }, - { - "code": "def _getRunningApps(cls):\n def runLoopAndExit():\n AppHelper.stopEventLoop()\n AppHelper.callLater(1, runLoopAndExit)\n AppHelper.runConsoleEventLoop()\n ws = AppKit.NSWorkspace.sharedWorkspace()\n apps = ws.runningApplications()\n return apps", - "docstring": "Get a list of the running applications." - }, - { - "code": "def spia_matrices_to_tsvs(spia_matrices: Mapping[str, pd.DataFrame], directory: str) -> None:\n os.makedirs(directory, exist_ok=True)\n for relation, df in spia_matrices.items():\n df.to_csv(os.path.join(directory, f'{relation}.tsv'), index=True)", - "docstring": "Export a SPIA data dictionary into a directory as several TSV documents." 
- }, - { - "code": "def convert_ranges(cls, ranges, length):\n result = []\n for start, end in ranges:\n if end is None:\n result.append( (start, length-1) )\n elif start is None:\n s = length - end\n result.append( (0 if s < 0 else s, length-1) )\n else:\n result.append( (start, end if end < length else length-1) )\n return result", - "docstring": "Converts to valid byte ranges" - }, - { - "code": "def validate_model(cursor, model):\n _validate_license(model)\n _validate_roles(model)\n required_metadata = ('title', 'summary',)\n for metadata_key in required_metadata:\n if model.metadata.get(metadata_key) in [None, '', []]:\n raise exceptions.MissingRequiredMetadata(metadata_key)\n _validate_derived_from(cursor, model)\n _validate_subjects(cursor, model)", - "docstring": "Validates the model using a series of checks on bits of the data." - }, - { - "code": "def data_to_tfrecord(images, labels, filename):\n if os.path.isfile(filename):\n print(\"%s exists\" % filename)\n return\n print(\"Converting data into %s ...\" % filename)\n writer = tf.python_io.TFRecordWriter(filename)\n for index, img in enumerate(images):\n img_raw = img.tobytes()\n label = int(labels[index])\n example = tf.train.Example(\n features=tf.train.Features(\n feature={\n \"label\": tf.train.Feature(int64_list=tf.train.Int64List(value=[label])),\n 'img_raw': tf.train.Feature(bytes_list=tf.train.BytesList(value=[img_raw])),\n }\n )\n )\n writer.write(example.SerializeToString())\n writer.close()", - "docstring": "Save data into TFRecord." - }, - { - "code": "def read(self, dispatcher):\n try:\n if not self.is_header_read:\n to_read = HeronProtocol.HEADER_SIZE - len(self.header)\n self.header += dispatcher.recv(to_read)\n if len(self.header) == HeronProtocol.HEADER_SIZE:\n self.is_header_read = True\n else:\n Log.debug(\"Header read incomplete; read %d bytes of header\" % len(self.header))\n return\n if self.is_header_read and not self.is_complete:\n to_read = self.get_datasize() - len(self.data)\n self.data += dispatcher.recv(to_read)\n if len(self.data) == self.get_datasize():\n self.is_complete = True\n except socket.error as e:\n if e.errno == socket.errno.EAGAIN or e.errno == socket.errno.EWOULDBLOCK:\n Log.debug(\"Try again error\")\n else:\n Log.debug(\"Fatal error when reading IncomingPacket\")\n raise RuntimeError(\"Fatal error occured in IncomingPacket.read()\")", - "docstring": "Reads incoming data from asyncore.dispatcher" - }, - { - "code": "def save(self, *args, **kwargs):\n current_activable_value = getattr(self, self.ACTIVATABLE_FIELD_NAME)\n is_active_changed = self.id is None or self.__original_activatable_value != current_activable_value\n self.__original_activatable_value = current_activable_value\n ret_val = super(BaseActivatableModel, self).save(*args, **kwargs)\n if is_active_changed:\n model_activations_changed.send(self.__class__, instance_ids=[self.id], is_active=current_activable_value)\n if self.activatable_field_updated:\n model_activations_updated.send(self.__class__, instance_ids=[self.id], is_active=current_activable_value)\n return ret_val", - "docstring": "A custom save method that handles figuring out when something is activated or deactivated." 
- }, - { - "code": "def STRD(cpu, src1, src2, dest, offset=None):\n assert src1.type == 'register'\n assert src2.type == 'register'\n assert dest.type == 'memory'\n val1 = src1.read()\n val2 = src2.read()\n writeback = cpu._compute_writeback(dest, offset)\n cpu.write_int(dest.address(), val1, 32)\n cpu.write_int(dest.address() + 4, val2, 32)\n cpu._cs_hack_ldr_str_writeback(dest, offset, writeback)", - "docstring": "Writes the contents of two registers to memory." - }, - { - "code": "def replace(self, year=None, month=None, day=None, hour=None,\n minute=None, second=None, microsecond=None, tzinfo=True):\n if year is None:\n year = self.year\n if month is None:\n month = self.month\n if day is None:\n day = self.day\n if hour is None:\n hour = self.hour\n if minute is None:\n minute = self.minute\n if second is None:\n second = self.second\n if microsecond is None:\n microsecond = self.microsecond\n if tzinfo is True:\n tzinfo = self.tzinfo\n return datetime.__new__(type(self),\n year, month, day, hour, minute, second,\n microsecond, tzinfo)", - "docstring": "Return a new datetime with new values for the specified fields." - }, - { - "code": "def apply_types(use_types, guess_type, line):\n new_line = {}\n for k, v in line.items():\n if use_types.has_key(k):\n new_line[k] = force_type(use_types[k], v)\n elif guess_type:\n new_line[k] = determine_type(v)\n else:\n new_line[k] = v\n return new_line", - "docstring": "Apply the types on the elements of the line" - }, - { - "code": "def up(self):\n self.swap(self.get_ordering_queryset().filter(order__lt=self.order).order_by('-order'))", - "docstring": "Move this object up one position." - }, - { - "code": "def process_dir(self, album, force=False):\n for f in album:\n if isfile(f.dst_path) and not force:\n self.logger.info(\"%s exists - skipping\", f.filename)\n self.stats[f.type + '_skipped'] += 1\n else:\n self.stats[f.type] += 1\n yield (f.type, f.path, f.filename, f.src_path, album.dst_path,\n self.settings)", - "docstring": "Process a list of images in a directory." - }, - { - "code": "def valid_path(path):\n if path.endswith('*'):\n Log.debug('Checking classpath entry suffix as directory: %s', path[:-1])\n if os.path.isdir(path[:-1]):\n return True\n return False\n Log.debug('Checking classpath entry as directory: %s', path)\n if os.path.isdir(path):\n return True\n else:\n Log.debug('Checking classpath entry as file: %s', path)\n if os.path.isfile(path):\n return True\n return False", - "docstring": "Check if an entry in the class path exists as either a directory or a file" - }, - { - "code": "def cmd_kill(opts):\n kill_signal = opts.signal if hasattr(opts, 'signal') else \"SIGKILL\"\n __with_containers(opts, Blockade.kill, signal=kill_signal)", - "docstring": "Kill some or all containers" - }, - { - "code": "def do_help(self, arg):\n print(self.response_prompt, file=self.stdout)\n return cmd.Cmd.do_help(self, arg)", - "docstring": "Show help on all commands." 
- }, - { - "code": "def save_service(self, service, overwrite=True):\n name = namesgenerator.get_sane_name(service.name)\n if not name:\n name = namesgenerator.get_random_name()\n if name in self.name_index:\n name = namesgenerator.get_random_name(retry=True)\n if name in self.name_index:\n if overwrite:\n self._delete(name=name)\n else:\n raise Exception(\"service name already registered.\")\n self._insert(Service(\n name=name,\n url=baseurl(service.url),\n type=service.type,\n purl=service.purl,\n public=service.public,\n auth=service.auth,\n verify=service.verify))\n return self.fetch_by_name(name=name)", - "docstring": "Store an OWS service in database." - }, - { - "code": "def destroy(self):\r\n self.ws.destroy()\r\n self.bot.remove_listener(self.on_socket_response)\r\n self.hooks.clear()", - "docstring": "Destroys the Lavalink client." - }, - { - "code": "def datagram_received(self, datagram, address):\n size, _ = RTheader.unpack_from(datagram, 0)\n info, = struct.unpack_from(\"{0}s\".format(size - 3 - 8), datagram, RTheader.size)\n base_port, = QRTDiscoveryBasePort.unpack_from(datagram, size - 2)\n if self.receiver is not None:\n self.receiver(QRTDiscoveryResponse(info, address[0], base_port))", - "docstring": "Parse response from QTM instances" - }, - { - "code": "def parse_docstring(self):\n self.log.debug(\n \"parsing docstring, token is %r (%s)\", self.current.kind, self.current.value\n )\n while self.current.kind in (tk.COMMENT, tk.NEWLINE, tk.NL):\n self.stream.move()\n self.log.debug(\n \"parsing docstring, token is %r (%s)\",\n self.current.kind,\n self.current.value,\n )\n if self.current.kind == tk.STRING:\n docstring = self.current.value\n self.stream.move()\n return docstring\n return None", - "docstring": "Parse a single docstring and return its value." 
- }, - { - "code": "def run(self, args):\n self.args = self.parse_and_process_args(args)\n if self.args.version:\n print(__version__)\n return 0\n if self.args.rulefile:\n for filename in self.args.rulefile:\n self._load_rule_file(filename)\n if self.args.list:\n self.list_rules()\n return 0\n if self.args.describe:\n self._describe_rules(self.args.args)\n return 0\n self.counts = { ERROR: 0, WARNING: 0, \"other\": 0}\n for filename in self.args.args:\n if not (os.path.exists(filename)):\n sys.stderr.write(\"rflint: %s: No such file or directory\\n\" % filename)\n continue\n if os.path.isdir(filename):\n self._process_folder(filename)\n else:\n self._process_file(filename)\n if self.counts[ERROR] > 0:\n return self.counts[ERROR] if self.counts[ERROR] < 254 else 255\n return 0", - "docstring": "Parse command line arguments, and run rflint" - }, - { - "code": "def start(self):\n self.__thread = Threads(target=self.run, args=(True, True, False))\n self.__thread.setDaemon(True)\n self.__thread.start()", - "docstring": "Run FIO job in thread" - }, - { - "code": "def data_received(self, data):\n self._received_data += data\n h_size = RTheader.size\n data = self._received_data\n size, type_ = RTheader.unpack_from(data, 0)\n while len(data) >= size:\n self._parse_received(data[h_size:size], type_)\n data = data[size:]\n if len(data) < h_size:\n break\n size, type_ = RTheader.unpack_from(data, 0)\n self._received_data = data", - "docstring": "Received from QTM and route accordingly" - }, - { - "code": "def person_inn():\n mask11 = [7, 2, 4, 10, 3, 5, 9, 4, 6, 8]\n mask12 = [3, 7, 2, 4, 10, 3, 5, 9, 4, 6, 8]\n inn = [random.randint(1, 9) for _ in range(12)]\n weighted11 = [v * mask11[i] for i, v in enumerate(inn[:-2])]\n inn[10] = sum(weighted11) % 11 % 10\n weighted12 = [v * mask12[i] for i, v in enumerate(inn[:-1])]\n inn[11] = sum(weighted12) % 11 % 10\n return \"\".join(map(str, inn))", - "docstring": "Return a random taxation ID number for a natural person." - }, - { - "code": "def capture_bash(self):\n class Capture(object):\n def __init__(self, satchel):\n self.satchel = satchel\n self._dryrun = self.satchel.dryrun\n self.satchel.dryrun = 1\n begincap()\n self._stdout = sys.stdout\n self._stderr = sys.stderr\n self.stdout = sys.stdout = StringIO()\n self.stderr = sys.stderr = StringIO()\n def __enter__(self):\n return self\n def __exit__(self, type, value, traceback):\n endcap()\n self.satchel.dryrun = self._dryrun\n sys.stdout = self._stdout\n sys.stderr = self._stderr\n return Capture(self)", - "docstring": "Context manager that hides the command prefix and activates dryrun to capture all following task commands to their equivalent Bash outputs." - }, - { - "code": "def _validate_course(self):\n course_details = self.cleaned_data.get(self.Fields.COURSE)\n if course_details:\n course_mode = self.cleaned_data.get(self.Fields.COURSE_MODE)\n if not course_mode:\n raise ValidationError(ValidationMessages.COURSE_WITHOUT_COURSE_MODE)\n valid_course_modes = course_details[\"course_modes\"]\n if all(course_mode != mode[\"slug\"] for mode in valid_course_modes):\n error = ValidationError(ValidationMessages.COURSE_MODE_INVALID_FOR_COURSE.format(\n course_mode=course_mode,\n course_id=course_details[\"course_id\"],\n ))\n raise ValidationError({self.Fields.COURSE_MODE: error})", - "docstring": "Verify that the selected mode is valid for the given course ." 
- }, - { - "code": "def attr(*args, **kwargs):\n ctx = dom_tag._with_contexts[_get_thread_context()]\n if ctx and ctx[-1]:\n dicts = args + (kwargs,)\n for d in dicts:\n for attr, value in d.items():\n ctx[-1].tag.set_attribute(*dom_tag.clean_pair(attr, value))\n else:\n raise ValueError('not in a tag context')", - "docstring": "Set attributes on the current active tag context" - }, - { - "code": "def path(self):\n \"Return a list of nodes forming the path from the root to this node.\"\n node, path_back = self, []\n while node:\n path_back.append(node)\n node = node.parent\n return list(reversed(path_back))", - "docstring": "Return a list of nodes forming the path from the root to this node." - }, - { - "code": "def make_game():\n return ascii_art.ascii_art_to_game(\n GAME_ART, what_lies_beneath=' ',\n sprites=dict(\n [('P', PlayerSprite)] +\n [(c, UpwardLaserBoltSprite) for c in UPWARD_BOLT_CHARS] +\n [(c, DownwardLaserBoltSprite) for c in DOWNWARD_BOLT_CHARS]),\n drapes=dict(X=MarauderDrape,\n B=BunkerDrape),\n update_schedule=['P', 'B', 'X'] + list(_ALL_BOLT_CHARS))", - "docstring": "Builds and returns an Extraterrestrial Marauders game." - }, - { - "code": "def pformat(o, indent=1, width=80, depth=None):\n return PrettyPrinter(indent=indent, width=width, depth=depth).pformat(o)", - "docstring": "Format a Python o into a pretty-printed representation." - }, - { - "code": "def render_page_with_error_code_message(request, context_data, error_code, log_message):\n LOGGER.error(log_message)\n messages.add_generic_error_message_with_code(request, error_code)\n return render(\n request,\n ENTERPRISE_GENERAL_ERROR_PAGE,\n context=context_data,\n status=404,\n )", - "docstring": "Return a 404 page with specified error_code after logging error and adding message to django messages." - }, - { - "code": "def pyramid(\n input_raster,\n output_dir,\n pyramid_type=None,\n output_format=None,\n resampling_method=None,\n scale_method=None,\n zoom=None,\n bounds=None,\n overwrite=False,\n debug=False\n):\n bounds = bounds if bounds else None\n options = dict(\n pyramid_type=pyramid_type,\n scale_method=scale_method,\n output_format=output_format,\n resampling=resampling_method,\n zoom=zoom,\n bounds=bounds,\n overwrite=overwrite\n )\n raster2pyramid(input_raster, output_dir, options)", - "docstring": "Create tile pyramid out of input raster." - }, - { - "code": "def _send_stream_error(self, condition):\n if self._output_state is \"closed\":\n return\n if self._output_state in (None, \"restart\"):\n self._send_stream_start()\n element = StreamErrorElement(condition).as_xml()\n self.transport.send_element(element)\n self.transport.disconnect()\n self._output_state = \"closed\"", - "docstring": "Same as `send_stream_error`, but expects `lock` acquired." 
- }, - { - "code": "def quit(self):\n if self._process is None:\n logger.debug('Quit was called after self._process had already been released')\n return\n try:\n logger.debug('Quitting OMXPlayer')\n process_group_id = os.getpgid(self._process.pid)\n os.killpg(process_group_id, signal.SIGTERM)\n logger.debug('SIGTERM Sent to pid: %s' % process_group_id)\n self._process_monitor.join()\n except OSError:\n logger.error('Could not find the process to kill')\n self._process = None", - "docstring": "Quit the player, blocking until the process has died" - }, - { - "code": "def filter(self,x):\n y = signal.sosfilt(self.sos,x)\n return y", - "docstring": "Filter the signal using second-order sections" - }, - { - "code": "def _store(self, offset, value, size=1):\n self.memory.write_BE(offset, value, size)\n for i in range(size):\n self._publish('did_evm_write_memory', offset + i, Operators.EXTRACT(value, (size - i - 1) * 8, 8))", - "docstring": "Stores value in memory as a big endian" - }, - { - "code": "def _destroy_image_acquirer(self, ia):\n id_ = None\n if ia.device:\n ia.stop_image_acquisition()\n ia._release_data_streams()\n id_ = ia._device.id_\n if ia.device.node_map:\n if ia._chunk_adapter:\n ia._chunk_adapter.detach_buffer()\n ia._chunk_adapter = None\n self._logger.info(\n 'Detached a buffer from the chunk adapter of {0}.'.format(\n id_\n )\n )\n ia.device.node_map.disconnect()\n self._logger.info(\n 'Disconnected the port from the NodeMap of {0}.'.format(\n id_\n )\n )\n if ia._device.is_open():\n ia._device.close()\n self._logger.info(\n 'Closed Device module, {0}.'.format(id_)\n )\n ia._device = None\n if id_:\n self._logger.info(\n 'Destroyed the ImageAcquirer object which {0} '\n 'had belonged to.'.format(id_)\n )\n else:\n self._logger.info(\n 'Destroyed an ImageAcquirer.'\n )\n if self._profiler:\n self._profiler.print_diff()\n self._ias.remove(ia)", - "docstring": "Releases all external resources including the controlling device." - }, - { - "code": "def export_keys(output_path, stash, passphrase, backend):\n stash = _get_stash(backend, stash, passphrase)\n try:\n click.echo('Exporting stash to {0}...'.format(output_path))\n stash.export(output_path=output_path)\n click.echo('Export complete!')\n except GhostError as ex:\n sys.exit(ex)", - "docstring": "Export all keys to a file" - }, - { - "code": "def rootdir(self, username, reponame, create=True):\n path = os.path.join(self.workspace,\n 'datasets',\n username,\n reponame)\n if create:\n try:\n os.makedirs(path)\n except:\n pass\n return path", - "docstring": "Working directory for the repo" - }, - { - "code": "def what_requires(self, name):\n r = self.local_renderer\n r.env.name = name\n r.local('pipdeptree -p {name} --reverse')", - "docstring": "Lists the packages that require the given package." 
- }, - { - "code": "def main(output):\n from hbp_knowledge import get_graph\n graph = get_graph()\n text = to_html(graph)\n print(text, file=output)", - "docstring": "Output the HBP knowledge graph to the desktop" - }, - { - "code": "def getZeroedOutEncoding(self, n):\n assert all(field.numRecords>n for field in self.fields)\n encoding = np.concatenate([field.encoder.encode(SENTINEL_VALUE_FOR_MISSING_DATA)\\\n if field.isPredictedField else field.encodings[n] for field in self.fields])\n return encoding", - "docstring": "Returns the nth encoding with the predictedField zeroed out" - }, - { - "code": "def hash(self, id):\n h = md5(id).hexdigest()\n return os.path.join(self.path, h+self.type)", - "docstring": "Creates a unique filename in the cache for the id." - }, - { - "code": "def htmlFor(self,fname):\n if os.path.splitext(fname)[1].lower() in ['.jpg','.png']:\n html=''%(fname,fname)\n if \"_tif_\" in fname:\n html=html.replace(' 0\n )\n return sorted(allowed, key=backend.by_priority, reverse=True)", - "docstring": "Discover all keyrings for chaining." - }, - { - "code": "def read(args):\n if args.config_file is None or not isfile(args.config_file):\n return\n logging.info(\"Reading configure file: %s\"%args.config_file)\n config = cparser.ConfigParser()\n config.read(args.config_file)\n if not config.has_section('lrcloud'):\n raise RuntimeError(\"Configure file has no [lrcloud] section!\")\n for (name, value) in config.items('lrcloud'):\n if value == \"True\":\n value = True\n elif value == \"False\":\n value = False\n if getattr(args, name) is None:\n setattr(args, name, value)", - "docstring": "Reading the configure file and adds non-existing attributes to 'args" - }, - { - "code": "def _warn_if_not_finite(X):\n X = np.asanyarray(X)\n if (X.dtype.char in np.typecodes['AllFloat'] and\n not np.isfinite(X.sum()) and not np.isfinite(X).all()):\n warnings.warn(\"Result contains NaN, infinity\"\n \" or a value too large for %r.\" % X.dtype,\n category=UserWarning)", - "docstring": "UserWarning if array contains non-finite elements" - }, - { - "code": "def code_mapping(level, msg, default=99):\n try:\n return code_mappings_by_level[level][msg]\n except KeyError:\n pass\n if msg.count('\"') == 2 and ' \"' in msg and msg.endswith('\".'):\n txt = msg[: msg.index(' \"')]\n return code_mappings_by_level[level].get(txt, default)\n return default", - "docstring": "Return an error code between 0 and 99." - }, - { - "code": "def _replace_on_id(self, new_object):\n the_id = new_object.id\n the_index = self._dict[the_id]\n list.__setitem__(self, the_index, new_object)", - "docstring": "Replace an object by another with the same id." - }, - { - "code": "def handle_set(self, item, value):\n doc = yield from self.call('SET/{}'.format(item), dict(value=value))\n if doc is None:\n return None\n return doc.status == 'FS_OK'", - "docstring": "Helper method for setting a value by using the fsapi API." - }, - { - "code": "def cached_httpbl_exempt(view_func):\n def wrapped_view(*args, **kwargs):\n return view_func(*args, **kwargs)\n wrapped_view.cached_httpbl_exempt = True\n return wraps(view_func, assigned=available_attrs(view_func))(wrapped_view)", - "docstring": "Marks a view function as being exempt from the cached httpbl view protection." 
- }, - { - "code": "def images(self):\n \"List of paths to images.\"\n tifs = _pattern(self._image_path, extension='tif')\n pngs = _pattern(self._image_path, extension='png')\n imgs = []\n imgs.extend(glob(tifs))\n imgs.extend(glob(pngs))\n return imgs", - "docstring": "List of paths to images." - }, - { - "code": "def _get_warped_array(\n input_file=None,\n indexes=None,\n dst_bounds=None,\n dst_shape=None,\n dst_crs=None,\n resampling=None,\n src_nodata=None,\n dst_nodata=None\n):\n try:\n return _rasterio_read(\n input_file=input_file,\n indexes=indexes,\n dst_bounds=dst_bounds,\n dst_shape=dst_shape,\n dst_crs=dst_crs,\n resampling=resampling,\n src_nodata=src_nodata,\n dst_nodata=dst_nodata\n )\n except Exception as e:\n logger.exception(\"error while reading file %s: %s\", input_file, e)\n raise", - "docstring": "Extract a numpy array from a raster file." - }, - { - "code": "def paramname(param=\"\"):\n try: \n name = pinfo[str(param)][0].strip().split(\" \")[1]\n except (KeyError, ValueError) as err:\n print(\"\\tKey name/number not recognized - \".format(param), err)\n raise\n return name", - "docstring": "Get the param name from the dict index value." - }, - { - "code": "def _calculate_period(self, vals):\n if len(vals) < 4:\n return None\n if self.firmware['major'] < 16:\n return ((vals[3] << 24) | (vals[2] << 16) | (vals[1] << 8) | vals[0]) / 12e6\n else:\n return self._calculate_float(vals)", - "docstring": "calculate the sampling period in seconds" - }, - { - "code": "def offer(self, p, e: Event):\n existing = self.events_scan.setdefault(\n p, ([], [], [], []) if USE_VERTICAL else\n ([], [], []))\n existing[e.type].append(e)", - "docstring": "Offer a new event ``s`` at point ``p`` in this queue." - }, - { - "code": "def load_db_set(self, name, r=None):\n r = r or self\n db_set = r.genv.db_sets.get(name, {})\n r.genv.update(db_set)", - "docstring": "Loads database parameters from a specific named set." - }, - { - "code": "def from_spec(spec, kwargs=None):\n network = util.get_object(\n obj=spec,\n default_object=LayeredNetwork,\n kwargs=kwargs\n )\n assert isinstance(network, Network)\n return network", - "docstring": "Creates a network from a specification dict." - }, - { - "code": "def missing(data, *args):\n not_found = object()\n if args and isinstance(args[0], list):\n args = args[0]\n ret = []\n for arg in args:\n if get_var(data, arg, not_found) is not_found:\n ret.append(arg)\n return ret", - "docstring": "Implements the missing operator for finding missing variables." - }, - { - "code": "def _update_transmissions(self, content_metadata_item_map, transmission_map):\n for content_id, channel_metadata in content_metadata_item_map.items():\n transmission = transmission_map[content_id]\n transmission.channel_metadata = channel_metadata\n transmission.save()", - "docstring": "Update ContentMetadataItemTransmision models for the given content metadata items." - }, - { - "code": "def bounds(self):\n if self._raw[\"bounds\"] is None:\n return self.process_pyramid.bounds\n else:\n return Bounds(*_validate_bounds(self._raw[\"bounds\"]))", - "docstring": "Process bounds as defined in the configuration." 
- }, - { - "code": "def _remove_bias(x, axis):\n \"Subtracts an estimate of the mean from signal x at axis\"\n padded_slice = [slice(d) for d in x.shape]\n padded_slice[axis] = np.newaxis\n mn = np.mean(x, axis=axis)\n return x - mn[tuple(padded_slice)]", - "docstring": "Subtracts an estimate of the mean from signal x at axis" - }, - { - "code": "def node_theta(self, node):\n group = self.find_node_group_membership(node)\n return self.group_theta(group)", - "docstring": "Convenience function to find the node's theta angle." - }, - { - "code": "def _has_branch(branch):\n ret = temple.utils.shell('git rev-parse --verify {}'.format(branch),\n stderr=subprocess.DEVNULL,\n stdout=subprocess.DEVNULL,\n check=False)\n return ret.returncode == 0", - "docstring": "Return True if the target branch exists." - }, - { - "code": "def add_user_to_allow(self, name, user):\n if not self.remove_user_from_acl(name, user):\n return False\n if name not in self._acl:\n return False\n self._acl[name]['allow'].append(user)\n return True", - "docstring": "Add a user to the given acl allow block." - }, - { - "code": "def between(a, b, inclusive_min=True, inclusive_max=True):\n return RangeValue(a, b,\n inclusive_min=inclusive_min, inclusive_max=inclusive_max)", - "docstring": "Indicate that value is a numeric range" - }, - { - "code": "def confirm(tag):\n click.echo()\n if click.confirm('Do you want to create the tag {tag}?'.format(\n tag=click.style(str(tag), fg='yellow')),\n default=True, abort=True):\n git.create_tag(tag)\n if click.confirm(\n 'Do you want to push the tag {tag} into the upstream?'.format(\n tag=click.style(str(tag), fg='yellow')),\n default=True):\n git.push_tag(tag)\n click.echo('Done!')\n else:\n git.delete_tag(tag)\n click.echo('Aborted!')", - "docstring": "Prompts user before proceeding" - }, - { - "code": "def includeme(config):\n settings = config.registry.settings\n session_factory = SignedCookieSessionFactory(settings['session_key'])\n config.set_session_factory(session_factory)", - "docstring": "Configures the session manager" - }, - { - "code": "def memoize(func):\n class Memodict(dict):\n def __getitem__(self, *key):\n return dict.__getitem__(self, key)\n def __missing__(self, key):\n ret = self[key] = func(*key)\n return ret\n return Memodict().__getitem__", - "docstring": "Memoization decorator for a function taking one or more arguments." - }, - { - "code": "def delete_thing(self, thing):\n try:\n self.things.remove(thing)\n except ValueError, e:\n print e\n print \" in Environment delete_thing\"\n print \" Thing to be removed: %s at %s\" % (thing, thing.location)\n print \" from list: %s\" % [(thing, thing.location)\n for thing in self.things]\n if thing in self.agents:\n self.agents.remove(thing)", - "docstring": "Remove a thing from the environment." - }, - { - "code": "def bohachevsky1(theta):\n x, y = theta\n obj = x ** 2 + 2 * y ** 2 - 0.3 * np.cos(3 * np.pi * x) - 0.4 * np.cos(4 * np.pi * y) + 0.7\n grad = np.array([\n 2 * x + 0.3 * np.sin(3 * np.pi * x) * 3 * np.pi,\n 4 * y + 0.4 * np.sin(4 * np.pi * y) * 4 * np.pi,\n ])\n return obj, grad", - "docstring": "One of the Bohachevsky functions" - }, - { - "code": "def do_toggle_variables(self, action):\n self.show_vars = action.get_active()\n if self.show_vars:\n self.show_variables_window()\n else:\n self.hide_variables_window()", - "docstring": "Widget Action to toggle showing the variables window." 
- }, - { - "code": "def heappop_max(heap):\n lastelt = heap.pop()\n if heap:\n returnitem = heap[0]\n heap[0] = lastelt\n _siftup_max(heap, 0)\n return returnitem\n return lastelt", - "docstring": "Maxheap version of a heappop." - }, - { - "code": "def iterkeys(data, **kwargs):\n return iter(data.keys(**kwargs)) if IS_PY3 else data.iterkeys(**kwargs)", - "docstring": "Iterate over dict keys." - }, - { - "code": "def mate(self, other):\n \"Return a new individual crossing self and other.\"\n c = random.randrange(len(self.genes))\n return self.__class__(self.genes[:c] + other.genes[c:])", - "docstring": "Return a new individual crossing self and other." - }, - { - "code": "def close(self):\n self._logger.info(\"Closing\")\n if self._opened:\n self._opened = False\n else:\n self._logger.warning(\n \"close() called, but connection policy was alredy closed\")\n return", - "docstring": "Close the policy instance." - }, - { - "code": "def score_models(clf, X, y, encoder, runs=1):\n scores = []\n X_test = None\n for _ in range(runs):\n X_test = encoder().fit_transform(X, y)\n X_test = StandardScaler().fit_transform(X_test)\n scores.append(cross_validate(clf, X_test, y, n_jobs=1, cv=5)['test_score'])\n gc.collect()\n scores = [y for z in [x for x in scores] for y in z]\n return float(np.mean(scores)), float(np.std(scores)), scores, X_test.shape[1]", - "docstring": "Takes in a classifier that supports multiclass classification, and X and a y, and returns a cross validation score." - }, - { - "code": "def value(self,ascode=None):\n if ascode is None:\n ascode = self.code\n return self.cast[ascode](self.text)", - "docstring": "Return text cast to the correct type or the selected type" - }, - { - "code": "def protect_memory_callback(self, start, size, perms):\n logger.info(f\"Changing permissions on {hex(start)}:{hex(start + size)} to {perms}\")\n self._emu.mem_protect(start, size, convert_permissions(perms))", - "docstring": "Set memory protections in Unicorn correctly" - }, - { - "code": "def slinky(filename, seconds_available, bucket_name, aws_key, aws_secret):\n if not os.environ.get('AWS_ACCESS_KEY_ID') and os.environ.get('AWS_SECRET_ACCESS_KEY'):\n \tprint 'Need to set environment variables for AWS access and create a slinky bucket.'\n \texit()\n print create_temp_s3_link(filename, seconds_available, bucket_name)", - "docstring": "Simple program that creates an temp S3 link." - }, - { - "code": "def calc_pts_hg(npts=20):\n pts_hg, wts_hg = np.polynomial.hermite.hermgauss(npts*2)\n pts_hg = pts_hg[npts:]\n wts_hg = wts_hg[npts:] * np.exp(pts_hg*pts_hg)\n return pts_hg, wts_hg", - "docstring": "Returns Hermite-Gauss quadrature points for even functions" - }, - { - "code": "async def handle_event(self, event):\r\n if isinstance(event, (TrackStuckEvent, TrackExceptionEvent)) or \\\r\n isinstance(event, TrackEndEvent) and event.reason == 'FINISHED':\r\n await self.play()", - "docstring": "Makes the player play the next song from the queue if a song has finished or an issue occurred." - }, - { - "code": "def addMenu(self):\n self.parent.multiLogLayout.addLayout(self.logSelectLayout)\n self.getPrograms(logType, programName)", - "docstring": "Add menus to parent gui." 
- }, - { - "code": "def _addRecordToKNN(self, record):\n classifier = self.htm_prediction_model._getAnomalyClassifier()\n knn = classifier.getSelf()._knn\n prototype_idx = classifier.getSelf().getParameter('categoryRecencyList')\n category = self._labelListToCategoryNumber(record.anomalyLabel)\n if record.ROWID in prototype_idx:\n knn.prototypeSetCategory(record.ROWID, category)\n return\n pattern = self._getStateAnomalyVector(record)\n rowID = record.ROWID\n knn.learn(pattern, category, rowID=rowID)", - "docstring": "This method will add the record to the KNN classifier." - }, - { - "code": "def _c2x(self, c):\n return 0.5 * (self.window[0] + self.window[1] +\n c * (self.window[1] - self.window[0]))", - "docstring": "Convert cheb coordinates to windowdow coordinates" - }, - { - "code": "def add_file_normal(f, targetdir, generator,script, source):\n basename = os.path.basename(f)\n if targetdir != \".\":\n relativepath = os.path.join(targetdir, basename)\n else:\n relativepath = basename\n relpath = os.path.relpath(f, os.getcwd())\n filetype = 'data'\n if script:\n filetype = 'script'\n if generator:\n filetype = 'generator'\n update = OrderedDict([\n ('type', filetype),\n ('generator', generator),\n ('relativepath', relativepath),\n ('content', \"\"),\n ('source', source),\n ('localfullpath', f),\n ('localrelativepath', relpath)\n ])\n update = annotate_record(update)\n return (basename, update)", - "docstring": "Add a normal file including its source" - }, - { - "code": "def build(self, **kwargs):\n self.yacc = yacc.yacc(module=self, **kwargs)", - "docstring": "Must be called before parse." - }, - { - "code": "def _get_available_choices(self, queryset, value):\n item = queryset.filter(pk=value).first()\n if item:\n try:\n pk = getattr(item, self.chained_model_field + \"_id\")\n filter = {self.chained_model_field: pk}\n except AttributeError:\n try:\n pks = getattr(item, self.chained_model_field).all().values_list('pk', flat=True)\n filter = {self.chained_model_field + \"__in\": pks}\n except AttributeError:\n try:\n pks = getattr(item, self.chained_model_field + \"_set\").all().values_list('pk', flat=True)\n filter = {self.chained_model_field + \"__in\": pks}\n except AttributeError:\n filter = {}\n filtered = list(get_model(self.to_app_name, self.to_model_name).objects.filter(**filter).distinct())\n if self.sort:\n sort_results(filtered)\n else:\n filtered = []\n return filtered", - "docstring": "get possible choices for selection" - }, - { - "code": "def on_save_as(self):\n path = self.tabWidget.current_widget().file.path\n path = os.path.dirname(path) if path else ''\n filename, filter = QtWidgets.QFileDialog.getSaveFileName(\n self, 'Save', path)\n if filename:\n self.tabWidget.save_current(filename)\n self.recent_files_manager.open_file(filename)\n self.menu_recents.update_actions()\n self.actionRun.setEnabled(True)\n self.actionConfigure_run.setEnabled(True)\n self._update_status_bar(self.tabWidget.current_widget())", - "docstring": "Save the current editor document as." - }, - { - "code": "def serpentine_x(x, y, matrix):\n if y % 2:\n return matrix.columns - 1 - x, y\n return x, y", - "docstring": "Every other row is indexed in reverse." 
- }, - { - "code": "def send_status_response(environ, start_response, e, add_headers=None, is_head=False):\n status = get_http_status_string(e)\n headers = []\n if add_headers:\n headers.extend(add_headers)\n if e in (HTTP_NOT_MODIFIED, HTTP_NO_CONTENT):\n start_response(\n status, [(\"Content-Length\", \"0\"), (\"Date\", get_rfc1123_time())] + headers\n )\n return [b\"\"]\n if e in (HTTP_OK, HTTP_CREATED):\n e = DAVError(e)\n assert isinstance(e, DAVError)\n content_type, body = e.get_response_page()\n if is_head:\n body = compat.b_empty\n assert compat.is_bytes(body), body\n start_response(\n status,\n [\n (\"Content-Type\", content_type),\n (\"Date\", get_rfc1123_time()),\n (\"Content-Length\", str(len(body))),\n ]\n + headers,\n )\n return [body]", - "docstring": "Start a WSGI response for a DAVError or status code." - }, - { - "code": "def setup_dirs(data):\n pdir = os.path.realpath(data.paramsdict[\"project_dir\"])\n data.dirs.clusts = os.path.join(pdir, \"{}_clust_{}\"\\\n .format(data.name, data.paramsdict[\"clust_threshold\"]))\n if not os.path.exists(data.dirs.clusts):\n os.mkdir(data.dirs.clusts)\n data.tmpdir = os.path.abspath(os.path.expanduser(\n os.path.join(pdir, data.name+'-tmpalign')))\n if not os.path.exists(data.tmpdir):\n os.mkdir(data.tmpdir)\n if not data.paramsdict[\"assembly_method\"] == \"denovo\":\n data.dirs.refmapping = os.path.join(pdir, \"{}_refmapping\".format(data.name))\n if not os.path.exists(data.dirs.refmapping):\n os.mkdir(data.dirs.refmapping)", - "docstring": "sets up directories for step3 data" - }, - { - "code": "def _node_to_model(tree_or_item, metadata=None, parent=None,\n lucent_id=cnxepub.TRANSLUCENT_BINDER_ID):\n if 'contents' in tree_or_item:\n tree = tree_or_item\n binder = cnxepub.TranslucentBinder(metadata=tree)\n for item in tree['contents']:\n node = _node_to_model(item, parent=binder,\n lucent_id=lucent_id)\n if node.metadata['title'] != item['title']:\n binder.set_title_for_node(node, item['title'])\n result = binder\n else:\n item = tree_or_item\n result = cnxepub.DocumentPointer(item['id'], metadata=item)\n if parent is not None:\n parent.append(result)\n return result", - "docstring": "Given a tree, parse to a set of models" - }, - { - "code": "def registerGoodClass(self, class_):\n self._valid_classes.append(class_)\n for name, cls in class_members(class_):\n if self.isValidClass(cls):\n self.registerGoodClass(cls)", - "docstring": "Internal bookkeeping to handle nested classes" - }, - { - "code": "def cancel_scheduled_play(self, call_params):\n path = '/' + self.api_version + '/CancelScheduledPlay/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Cancel a Scheduled Play Helper" - }, - { - "code": "def start(self):\r\n if self.extra_args:\r\n sys.exit('{} takes no extra arguments'.format(self.name))\r\n else:\r\n if self._toggle_value:\r\n nbextensions.install_nbextension_python(\r\n _pkg_name, overwrite=True, symlink=False,\r\n user=self.user, sys_prefix=self.sys_prefix, prefix=None,\r\n nbextensions_dir=None, logger=None)\r\n else:\r\n nbextensions.uninstall_nbextension_python(\r\n _pkg_name, user=self.user, sys_prefix=self.sys_prefix,\r\n prefix=None, nbextensions_dir=None, logger=None)\r\n self.toggle_nbextension_python(_pkg_name)\r\n self.toggle_server_extension_python(_pkg_name)", - "docstring": "Perform the App's actions as configured." 
- }, - { - "code": "def save_thumbnail(image_path, base_image_name, gallery_conf):\n first_image_file = image_path.format(1)\n thumb_dir = os.path.join(os.path.dirname(first_image_file), 'thumb')\n if not os.path.exists(thumb_dir):\n os.makedirs(thumb_dir)\n thumb_file = os.path.join(thumb_dir,\n 'sphx_glr_%s_thumb.png' % base_image_name)\n if os.path.exists(first_image_file):\n scale_image(first_image_file, thumb_file, 400, 280)\n elif not os.path.exists(thumb_file):\n default_thumb_file = os.path.join(glr_path_static(), 'no_image.png')\n default_thumb_file = gallery_conf.get(\"default_thumb_file\",\n default_thumb_file)\n scale_image(default_thumb_file, thumb_file, 200, 140)", - "docstring": "Save the thumbnail image" - }, - { - "code": "def clear_dir(self, path):\n dirs, files = self.storage.listdir(path)\n for f in files:\n fpath = os.path.join(path, f)\n if self.dry_run:\n self.log(\"Pretending to delete '%s'\" %\n smart_text(fpath), level=1)\n else:\n self.log(\"Deleting '%s'\" % smart_text(fpath), level=1)\n self.storage.delete(fpath)\n for d in dirs:\n self.clear_dir(os.path.join(path, d))", - "docstring": "Deletes the given relative path using the destination storage backend." - }, - { - "code": "def find_block(context, *names):\n block_set = context.render_context[BLOCK_CONTEXT_KEY]\n for name in names:\n block = block_set.get_block(name)\n if block is not None:\n return block\n raise template.TemplateSyntaxError('No widget found for: %r' % (names,))", - "docstring": "Find the first matching block in the current block_context" - }, - { - "code": "def _extractCallingMethodArgs():\n import inspect\n import copy\n callingFrame = inspect.stack()[1][0]\n argNames, _, _, frameLocalVarDict = inspect.getargvalues(callingFrame)\n argNames.remove(\"self\")\n args = copy.copy(frameLocalVarDict)\n for varName in frameLocalVarDict:\n if varName not in argNames:\n args.pop(varName)\n return args", - "docstring": "Returns args dictionary from the calling method" - }, - { - "code": "def imgmax(self):\n if not hasattr(self, '_imgmax'):\n imgmax = _np.max(self.images[0])\n for img in self.images:\n imax = _np.max(img)\n if imax > imgmax:\n imgmax = imax\n self._imgmax = imgmax\n return self._imgmax", - "docstring": "Highest value of input image." - }, - { - "code": "def sanitize(self, val):\n if self.type == NUMBER:\n try:\n return clamp(self.min, self.max, float(val))\n except ValueError:\n return 0.0\n elif self.type == TEXT:\n try:\n return unicode(str(val), \"utf_8\", \"replace\")\n except:\n return \"\"\n elif self.type == BOOLEAN:\n if unicode(val).lower() in (\"true\", \"1\", \"yes\"):\n return True\n else:\n return False", - "docstring": "Given a Variable and a value, cleans it out" - }, - { - "code": "def apply_mask(img, mask):\n from .mask import apply_mask\n vol, _ = apply_mask(img, mask)\n return vector_to_volume(vol, read_img(mask).get_data().astype(bool))", - "docstring": "Return the image with the given `mask` applied." - }, - { - "code": "def record_schema(self):\n schema_path = current_jsonschemas.url_to_path(self['$schema'])\n schema_prefix = current_app.config['DEPOSIT_JSONSCHEMAS_PREFIX']\n if schema_path and schema_path.startswith(schema_prefix):\n return current_jsonschemas.path_to_url(\n schema_path[len(schema_prefix):]\n )", - "docstring": "Convert deposit schema to a valid record schema." - }, - { - "code": "def _setRTSDTR(port, RTS, DTR):\n port.setRTS(RTS)\n port.setDTR(DTR)", - "docstring": "Set RTS and DTR to the requested state." 
- }, - { - "code": "def _strip_zoom(input_string, strip_string):\n try:\n return int(input_string.strip(strip_string))\n except Exception as e:\n raise MapcheteConfigError(\"zoom level could not be determined: %s\" % e)", - "docstring": "Return zoom level as integer or throw error." - }, - { - "code": "def contains_entry(self, *args, **kwargs):\n self._check_dict_like(self.val, check_values=False)\n entries = list(args) + [{k:v} for k,v in kwargs.items()]\n if len(entries) == 0:\n raise ValueError('one or more entry args must be given')\n missing = []\n for e in entries:\n if type(e) is not dict:\n raise TypeError('given entry arg must be a dict')\n if len(e) != 1:\n raise ValueError('given entry args must contain exactly one key-value pair')\n k = next(iter(e))\n if k not in self.val:\n missing.append(e)\n elif self.val[k] != e[k]:\n missing.append(e)\n if missing:\n self._err('Expected <%s> to contain entries %s, but did not contain %s.' % (self.val, self._fmt_items(entries), self._fmt_items(missing)))\n return self", - "docstring": "Asserts that val is a dict and contains the given entry or entries." - }, - { - "code": "def _get_authorization_headers(sapisid_cookie):\n time_msec = int(time.time() * 1000)\n auth_string = '{} {} {}'.format(time_msec, sapisid_cookie, ORIGIN_URL)\n auth_hash = hashlib.sha1(auth_string.encode()).hexdigest()\n sapisidhash = 'SAPISIDHASH {}_{}'.format(time_msec, auth_hash)\n return {\n 'authorization': sapisidhash,\n 'x-origin': ORIGIN_URL,\n 'x-goog-authuser': '0',\n }", - "docstring": "Return authorization headers for API request." - }, - { - "code": "def _self_referential_fk(klass_model):\n for f in klass_model._meta.concrete_fields:\n if f.related_model:\n if issubclass(klass_model, f.related_model):\n return f.attname\n return None", - "docstring": "Return whether this model has a self ref FK, and the name for the field" - }, - { - "code": "def _save_config(self, filename=None):\n if filename is None:\n filename = self._config_filename\n parent_path = os.path.dirname(filename)\n if not os.path.isdir(parent_path):\n os.makedirs(parent_path)\n with open(filename, \"w\") as configfile:\n self._config.write(configfile)", - "docstring": "Save the given user configuration." 
- }, - { - "code": "def show(self, args, file_handle=None, **kwargs):\n \"Write to file_handle if supplied, othewise print output\"\n full_string = ''\n info = {'root_directory': '',\n 'batch_name': '',\n 'batch_tag': '',\n 'batch_description': '',\n 'launcher': '',\n 'timestamp_format': '',\n 'timestamp': tuple(time.localtime()),\n 'varying_keys': args.varying_keys,\n 'constant_keys': args.constant_keys,\n 'constant_items': args.constant_items}\n quoted_cmds = [ subprocess.list2cmdline(\n [el for el in self(self._formatter(s),'',info)])\n for s in args.specs]\n cmd_lines = ['%d: %s\\n' % (i, qcmds) for (i,qcmds)\n in enumerate(quoted_cmds)]\n full_string += ''.join(cmd_lines)\n if file_handle:\n file_handle.write(full_string)\n file_handle.flush()\n else:\n print(full_string)", - "docstring": "Write to file_handle if supplied, othewise print output" - }, - { - "code": "def download_file(from_url, to_filename=None,\n chunk_size=1024 * 8, retry_count=3):\n if not to_filename:\n to_filename = get_temporary_file()\n session = requests.Session()\n adapter = requests.adapters.HTTPAdapter(max_retries=retry_count)\n session.mount(from_url, adapter)\n response = session.get(from_url, stream=True)\n with open(to_filename, 'wb') as fd:\n for chunk in response.iter_content(chunk_size):\n fd.write(chunk)\n return to_filename", - "docstring": "Download URL to a file." - }, - { - "code": "def Loc(kind, loc=None):\n @llrule(loc, lambda parser: [kind])\n def rule(parser):\n result = parser._accept(kind)\n if result is unmatched:\n return result\n return result.loc\n return rule", - "docstring": "A rule that accepts a token of kind ``kind`` and returns its location, or returns None." - }, - { - "code": "def ensure_task_params_are_complete(task_descriptors):\n for task_desc in task_descriptors:\n for param in [\n 'labels', 'envs', 'inputs', 'outputs', 'input-recursives',\n 'output-recursives'\n ]:\n if not task_desc.task_params.get(param):\n task_desc.task_params[param] = set()", - "docstring": "For each task, ensure that each task param entry is not None." - }, - { - "code": "def parse_type_comment(type_comment):\n try:\n result = ast3.parse(type_comment, '', 'eval')\n except SyntaxError:\n raise ValueError(f\"invalid type comment: {type_comment!r}\") from None\n assert isinstance(result, ast3.Expression)\n return result.body", - "docstring": "Parse a type comment string into AST nodes." - }, - { - "code": "def add_data_tuple(self, stream_id, new_data_tuple, tuple_size_in_bytes):\n if (self.current_data_tuple_set is None) or \\\n (self.current_data_tuple_set.stream.id != stream_id) or \\\n (len(self.current_data_tuple_set.tuples) >= self.data_tuple_set_capacity) or \\\n (self.current_data_tuple_size_in_bytes >= self.max_data_tuple_size_in_bytes):\n self._init_new_data_tuple(stream_id)\n added_tuple = self.current_data_tuple_set.tuples.add()\n added_tuple.CopyFrom(new_data_tuple)\n self.current_data_tuple_size_in_bytes += tuple_size_in_bytes\n self.total_data_emitted_in_bytes += tuple_size_in_bytes", - "docstring": "Add a new data tuple to the currently buffered set of tuples" - }, - { - "code": "def consume(self, consume_function):\n from heronpy.streamlet.impl.consumebolt import ConsumeStreamlet\n consume_streamlet = ConsumeStreamlet(consume_function, self)\n self._add_child(consume_streamlet)\n return", - "docstring": "Calls consume_function for each element of this streamlet. 
This function returns nothing" - }, - { - "code": "def node_heap(self):\n log.info('Heap')\n res = self.__exchange('print(node.heap())')\n log.info(res)\n return int(res.split('\\r\\n')[1])", - "docstring": "Show device heap size" - }, - { - "code": "def _in_range(self, index):\n if isinstance(index, slice):\n in_range = index.start < index.stop and \\\n index.start >= self.start and \\\n index.stop <= self.end\n else:\n in_range = index >= self.start and \\\n index <= self.end\n return in_range", - "docstring": "Returns True if index is in range" - }, - { - "code": "def annotate_metadata_data(repo, task, patterns=[\"*\"], size=0):\n mgr = plugins_get_mgr() \n keys = mgr.search('representation')['representation']\n representations = [mgr.get_by_key('representation', k) for k in keys]\n matching_files = repo.find_matching_files(patterns)\n package = repo.package\n rootdir = repo.rootdir\n files = package['resources']\n for f in files:\n relativepath = f['relativepath']\n if relativepath in matching_files:\n path = os.path.join(rootdir, relativepath)\n if task == 'preview':\n print(\"Adding preview for \", relativepath)\n f['content'] = open(path).read()[:size]\n elif task == 'schema':\n for r in representations: \n if r.can_process(path): \n print(\"Adding schema for \", path)\n f['schema'] = r.get_schema(path)\n break", - "docstring": "Update metadata with the content of the files" - }, - { - "code": "def angles(self):\n return [self.ode_obj.getAngle(i) for i in range(self.ADOF)]", - "docstring": "List of angles for rotational degrees of freedom." - }, - { - "code": "def apply_function(self, points):\n if not self.option.function:\n return points\n if np is None:\n raise ImportError('numpy is not available')\n if ':' in self.option.function:\n function, arguments = self.option.function.split(':', 1)\n arguments = arguments.split(',')\n else:\n function = self.option.function\n arguments = []\n arguments = list(map(self._function_argument, arguments))\n filter_function = FUNCTION.get(function)\n if filter_function is None:\n raise TypeError('Invalid function \"%s\"' % (function,))\n else:\n return filter_function(np.array(list(points)), *arguments)", - "docstring": "Run the filter function on the provided points." - }, - { - "code": "def missing_info(**kwargs) -> str:\n func = kwargs.pop('func', 'unknown')\n if 'ticker' in kwargs: kwargs['ticker'] = kwargs['ticker'].replace('/', '_')\n info = utils.to_str(kwargs, fmt='{value}', sep='/')[1:-1]\n return f'{func}/{info}'", - "docstring": "Full infomation for missing query" - }, - { - "code": "def sorted(list, cmp=None, reversed=False):\n list = [x for x in list]\n list.sort(cmp)\n if reversed: list.reverse()\n return list", - "docstring": "Returns a sorted copy of the list." - }, - { - "code": "def package_verif_node(self, package):\n verif_node = BNode()\n type_triple = (verif_node, RDF.type, self.spdx_namespace.PackageVerificationCode)\n self.graph.add(type_triple)\n value_triple = (verif_node, self.spdx_namespace.packageVerificationCodeValue, Literal(package.verif_code))\n self.graph.add(value_triple)\n excl_file_nodes = map(\n lambda excl: Literal(excl), package.verif_exc_files)\n excl_predicate = self.spdx_namespace.packageVerificationCodeExcludedFile\n excl_file_triples = [(verif_node, excl_predicate, xcl_file) for xcl_file in excl_file_nodes]\n for trp in excl_file_triples:\n self.graph.add(trp)\n return verif_node", - "docstring": "Return a node representing package verification code." 
- }, - { - "code": "def csi(self, capname, *args):\n value = curses.tigetstr(capname)\n if value is None:\n return b''\n else:\n return curses.tparm(value, *args)", - "docstring": "Return the escape sequence for the selected Control Sequence." - }, - { - "code": "def check_pid(pid, debug):\n try:\n os.kill(pid, 0)\n if debug > 1:\n print(\"Script has a PIDFILE where the process is still running\")\n return True\n except OSError:\n if debug > 1:\n print(\"Script does not appear to be running\")\n return False", - "docstring": "This function will check whether a PID is currently running" - }, - { - "code": "def _getTPClass(temporalImp):\n if temporalImp == 'py':\n return backtracking_tm.BacktrackingTM\n elif temporalImp == 'cpp':\n return backtracking_tm_cpp.BacktrackingTMCPP\n elif temporalImp == 'tm_py':\n return backtracking_tm_shim.TMShim\n elif temporalImp == 'tm_cpp':\n return backtracking_tm_shim.TMCPPShim\n elif temporalImp == 'monitored_tm_py':\n return backtracking_tm_shim.MonitoredTMShim\n else:\n raise RuntimeError(\"Invalid temporalImp '%s'. Legal values are: 'py', \"\n \"'cpp', 'tm_py', 'monitored_tm_py'\" % (temporalImp))", - "docstring": "Return the class corresponding to the given temporalImp string" - }, - { - "code": "def _expand_targets(self, targets, base_dir=None):\n all_targets = []\n for target in targets:\n target_dirs = [p for p in [base_dir, os.path.dirname(target)] if p]\n target_dir = target_dirs and os.path.join(*target_dirs) or ''\n target = os.path.basename(target)\n target_path = os.path.join(target_dir, target)\n if os.path.exists(target_path):\n all_targets.append(target_path)\n with open(target_path) as fp:\n for line in fp:\n if line.startswith('-r '):\n _, new_target = line.split(' ', 1)\n all_targets.extend(self._expand_targets([new_target.strip()], base_dir=target_dir))\n return all_targets", - "docstring": "Expand targets by looking for '-r' in targets." 
- }, - { - "code": "def draw_bbox(self, projection_matrix=None, camera_matrix=None, all=True):\n projection_matrix = projection_matrix.astype('f4').tobytes()\n camera_matrix = camera_matrix.astype('f4').tobytes()\n self.bbox_program[\"m_proj\"].write(projection_matrix)\n self.bbox_program[\"m_view\"].write(self._view_matrix.astype('f4').tobytes())\n self.bbox_program[\"m_cam\"].write(camera_matrix)\n self.bbox_program[\"bb_min\"].write(self.bbox_min.astype('f4').tobytes())\n self.bbox_program[\"bb_max\"].write(self.bbox_max.astype('f4').tobytes())\n self.bbox_program[\"color\"].value = (1.0, 0.0, 0.0)\n self.bbox_vao.render(self.bbox_program)\n if not all:\n return\n for node in self.root_nodes:\n node.draw_bbox(projection_matrix, camera_matrix, self.bbox_program, self.bbox_vao)", - "docstring": "Draw scene and mesh bounding boxes" - }, - { - "code": "def size_attachments(self):\n total_size = 0\n for attachment in self.fs_cleansed_attachments:\n total_size += stat(attachment).st_size\n return total_size", - "docstring": "returns the number of bytes that the cleansed attachments take up on disk" - }, - { - "code": "def deliveries(self):\n key = make_key(\n event=self.object.event,\n owner_name=self.object.owner.username,\n identifier=self.object.identifier\n )\n return redis.lrange(key, 0, 20)", - "docstring": "Get delivery log from Redis" - }, - { - "code": "def _load(self, config):\n if isinstance(config, six.string_types):\n try:\n config = json.loads(config)\n except ValueError:\n pass\n if not isinstance(config, dict):\n raise TypeError('config block must be an istance '\n 'of dict or a valid NetJSON string')\n return config", - "docstring": "Loads config from string or dict" - }, - { - "code": "def create_cookie(host, path, secure, expires, name, value):\n return http.cookiejar.Cookie(0, name, value, None, False, host, host.startswith('.'), host.startswith('.'), path,\n True, secure, expires, False, None, None, {})", - "docstring": "Shortcut function to create a cookie" - }, - { - "code": "def html(tag):\n return (HTML_START.format(tag=tag), HTML_END.format(tag=tag))", - "docstring": "Return sequence of start and end regex patterns for simple HTML tag" - }, - { - "code": "def readFromCheckpoint(cls, checkpointDir):\n checkpointPath = cls._getModelCheckpointFilePath(checkpointDir)\n with open(checkpointPath, 'r') as f:\n proto = cls.getSchema().read(f,\n traversal_limit_in_words=_TRAVERSAL_LIMIT_IN_WORDS)\n model = cls.read(proto)\n return model", - "docstring": "Deserializes model from checkpointDir using capnproto" - }, - { - "code": "def _bits_to_dec(nm, check=True):\n if check and not is_bits_nm(nm):\n raise ValueError('_bits_to_dec: invalid netmask: \"%s\"' % nm)\n bits = int(str(nm))\n return VALID_NETMASKS[bits]", - "docstring": "Bits to decimal conversion." - }, - { - "code": "def information_content(values):\n \"Number of bits to represent the probability distribution in values.\"\n probabilities = normalize(removeall(0, values))\n return sum(-p * log2(p) for p in probabilities)", - "docstring": "Number of bits to represent the probability distribution in values." 
- }, - { - "code": "def validate_username(self, value):\n try:\n user = User.objects.get(username=value)\n except User.DoesNotExist:\n raise serializers.ValidationError(\"User does not exist\")\n try:\n enterprise_customer_user = models.EnterpriseCustomerUser.objects.get(user_id=user.pk)\n except models.EnterpriseCustomerUser.DoesNotExist:\n raise serializers.ValidationError(\"User has no EnterpriseCustomerUser\")\n self.enterprise_customer_user = enterprise_customer_user\n return value", - "docstring": "Verify that the username has a matching user, and that the user has an associated EnterpriseCustomerUser." - }, - { - "code": "def heappush_max(heap, item):\n heap.append(item)\n _siftdown_max(heap, 0, len(heap) - 1)", - "docstring": "Push item onto heap, maintaining the heap invariant." - }, - { - "code": "def to_atomic(amount):\n if not isinstance(amount, (Decimal, float) + _integer_types):\n raise ValueError(\"Amount '{}' doesn't have numeric type. Only Decimal, int, long and \"\n \"float (not recommended) are accepted as amounts.\")\n return int(amount * 10**12)", - "docstring": "Convert Monero decimal to atomic integer of piconero." - }, - { - "code": "def run_subcommand(netgear, args):\n subcommand = args.subcommand\n if subcommand == \"block_device\" or subcommand == \"allow_device\":\n return netgear.allow_block_device(args.mac_addr, BLOCK if subcommand == \"block_device\" else ALLOW)\n if subcommand == \"attached_devices\":\n if args.verbose:\n return netgear.get_attached_devices_2()\n else:\n return netgear.get_attached_devices()\n if subcommand == 'traffic_meter':\n return netgear.get_traffic_meter()\n if subcommand == 'login':\n return netgear.login()\n print(\"Unknown subcommand\")", - "docstring": "Runs the subcommand configured in args on the netgear session" - }, - { - "code": "def _convert_pagenum(self, kwargs):\n for key in ('next', 'previous'):\n if not kwargs.get(key):\n continue\n match = re.search(r'page=(?P[\\d]+)', kwargs[key])\n if match is None and key == 'previous':\n kwargs[key] = 1\n continue\n kwargs[key] = int(match.groupdict()['num'])", - "docstring": "Convert next and previous from URLs to integers" - }, - { - "code": "def group_dict_set(iterator: Iterable[Tuple[A, B]]) -> Mapping[A, Set[B]]:\n d = defaultdict(set)\n for key, value in iterator:\n d[key].add(value)\n return dict(d)", - "docstring": "Make a dict that accumulates the values for each key in an iterator of doubles." 
- }, - { - "code": "def run(*args, **kwargs):\n kwargs.setdefault('env', os.environ)\n kwargs.setdefault('shell', True)\n try:\n subprocess.check_call(' '.join(args), **kwargs)\n return True\n except subprocess.CalledProcessError:\n logger.debug('Error running: {}'.format(args))\n return False", - "docstring": "Returns True if successful, False if failure" - }, - { - "code": "def _key_pressed(self, key, keycode):\n self._namespace['key'] = key\n self._namespace['keycode'] = keycode\n self._namespace['keydown'] = True", - "docstring": "GUI callback for key pressed" - }, - { - "code": "def sync_unicorn_to_manticore(self):\n self.write_backs_disabled = True\n for reg in self.registers:\n val = self._emu.reg_read(self._to_unicorn_id(reg))\n self._cpu.write_register(reg, val)\n if len(self._mem_delta) > 0:\n logger.debug(f\"Syncing {len(self._mem_delta)} writes back into Manticore\")\n for location in self._mem_delta:\n value, size = self._mem_delta[location]\n self._cpu.write_int(location, value, size * 8)\n self.write_backs_disabled = False\n self._mem_delta = {}", - "docstring": "Copy registers and written memory back into Manticore" - }, - { - "code": "def edit(dataset_uri):\n try:\n dataset = dtoolcore.ProtoDataSet.from_uri(\n uri=dataset_uri,\n config_path=CONFIG_PATH\n )\n except dtoolcore.DtoolCoreTypeError:\n dataset = dtoolcore.DataSet.from_uri(\n uri=dataset_uri,\n config_path=CONFIG_PATH\n )\n readme_content = dataset.get_readme_content()\n try:\n readme_content = unicode(readme_content, \"utf-8\")\n except NameError:\n pass\n edited_content = click.edit(readme_content)\n if edited_content is not None:\n _validate_and_put_readme(dataset, edited_content)\n click.secho(\"Updated readme \", nl=False, fg=\"green\")\n else:\n click.secho(\"Did not update readme \", nl=False, fg=\"red\")\n click.secho(dataset_uri)", - "docstring": "Default editor updating of readme content." - }, - { - "code": "async def update_state(self, data):\r\n guild_id = int(data['guildId'])\r\n if guild_id in self.players:\r\n player = self.players.get(guild_id)\r\n player.position = data['state'].get('position', 0)\r\n player.position_timestamp = data['state']['time']", - "docstring": "Updates a player's state when a payload with opcode ``playerUpdate`` is received." - }, - { - "code": "def on_marker(self, marker):\n mid, pos = marker\n self.marker = Marker(__id__=mid)\n mapview = self.parent()\n mapview.markers[mid] = self\n self.marker.setTag(mid)\n for w in self.child_widgets():\n mapview.init_info_window_adapter()\n break\n d = self.declaration\n if d.show_info:\n self.set_show_info(d.show_info)\n del self.options", - "docstring": "Convert our options into the actual marker object" - }, - { - "code": "def keep_only_fields(self):\n for tag in self.record.keys():\n if tag not in self.fields_list:\n record_delete_fields(self.record, tag)", - "docstring": "Keep only fields listed in field_list." - }, - { - "code": "def _inherit_data(self):\n LOG.debug(\"'%s' inheriting data from '%s'\" % (self.get_name(),\n self.parent.get_name()),\n extra=dict(data=self.parent.data))\n self.set_data(**self.parent.data)", - "docstring": "Inherits the data from the parent." 
- }, - { - "code": "def push(self, ip_packet):\n data_len = len(ip_packet.data.data)\n seq_id = ip_packet.data.seq\n if data_len == 0:\n self._next_seq_id = seq_id\n return False\n if self._next_seq_id != -1 and seq_id != self._next_seq_id:\n return False\n self._next_seq_id = seq_id + data_len\n with self._lock_packets:\n self._length += len(ip_packet.data.data)\n self._remaining += len(ip_packet.data.data)\n self._packets.append(ip_packet)\n return True", - "docstring": "push the packet into the queue" - }, - { - "code": "def _compute(self, inputs, outputs):\n if self._sfdr is None:\n raise RuntimeError(\"Spatial pooler has not been initialized\")\n if not self.topDownMode:\n self._iterations += 1\n buInputVector = inputs['bottomUpIn']\n resetSignal = False\n if 'resetIn' in inputs:\n assert len(inputs['resetIn']) == 1\n resetSignal = inputs['resetIn'][0] != 0\n rfOutput = self._doBottomUpCompute(\n rfInput = buInputVector.reshape((1,buInputVector.size)),\n resetSignal = resetSignal\n )\n outputs['bottomUpOut'][:] = rfOutput.flat\n else:\n topDownIn = inputs.get('topDownIn',None)\n spatialTopDownOut, temporalTopDownOut = self._doTopDownInfer(topDownIn)\n outputs['spatialTopDownOut'][:] = spatialTopDownOut\n if temporalTopDownOut is not None:\n outputs['temporalTopDownOut'][:] = temporalTopDownOut\n outputs['anomalyScore'][:] = 0", - "docstring": "Run one iteration of SPRegion's compute" - }, - { - "code": "def add_download_total(rows):\n total_row = [\"\"] * len(rows[0])\n total_row[0] = \"Total\"\n total_downloads, downloads_column = get_download_total(rows)\n total_row[downloads_column] = str(total_downloads)\n rows.append(total_row)\n return rows", - "docstring": "Add a final row to rows showing the total downloads" - }, - { - "code": "def writable_stream(handle):\n if isinstance(handle, io.IOBase) and sys.version_info >= (3, 5):\n return handle.writable()\n try:\n handle.write(b'')\n except (io.UnsupportedOperation, IOError):\n return False\n else:\n return True", - "docstring": "Test whether a stream can be written to." - }, - { - "code": "def _get_printable_columns(columns, row):\n if not columns:\n return row\n return tuple(row[c] for c in columns)", - "docstring": "Return only the part of the row which should be printed." - }, - { - "code": "def _update_optional(cobra_object, new_dict, optional_attribute_dict,\n ordered_keys):\n for key in ordered_keys:\n default = optional_attribute_dict[key]\n value = getattr(cobra_object, key)\n if value is None or value == default:\n continue\n new_dict[key] = _fix_type(value)", - "docstring": "update new_dict with optional attributes from cobra_object" - }, - { - "code": "def play_stop(self, call_params):\n path = '/' + self.api_version + '/PlayStop/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST PlayStop on a Call Helper" - }, - { - "code": "def _store_helper(model: Action, session: Optional[Session] = None) -> None:\n if session is None:\n session = _make_session()\n session.add(model)\n session.commit()\n session.close()", - "docstring": "Help store an action." - }, - { - "code": "def post_deploy(self):\n for service in self.genv.services:\n service = service.strip().upper()\n self.vprint('post_deploy:', service)\n funcs = common.service_post_deployers.get(service)\n if funcs:\n self.vprint('Running post-deployments for service %s...' 
% (service,))\n for func in funcs:\n try:\n func()\n except Exception as e:\n print('Post deployment error: %s' % e, file=sys.stderr)\n print(traceback.format_exc(), file=sys.stderr)", - "docstring": "Runs methods services have requested be run after deployment." - }, - { - "code": "def register(self, cls):\n preview = cls(site=self)\n logger.debug('Registering %r with %r', preview, self)\n index = self.__previews.setdefault(preview.module, {})\n index[cls.__name__] = preview", - "docstring": "Adds a preview to the index." - }, - { - "code": "def EXTCODECOPY(self, account, address, offset, size):\n extbytecode = self.world.get_code(account)\n self._allocate(address + size)\n for i in range(size):\n if offset + i < len(extbytecode):\n self._store(address + i, extbytecode[offset + i])\n else:\n self._store(address + i, 0)", - "docstring": "Copy an account's code to memory" - }, - { - "code": "def infer_assignment(self):\n \"Return the partial assignment implied by the current inferences.\"\n self.support_pruning()\n return dict((v, self.curr_domains[v][0])\n for v in self.vars if 1 == len(self.curr_domains[v]))", - "docstring": "Return the partial assignment implied by the current inferences." - }, - { - "code": "def _calc_hash_da(self, rs):\n self.hash_d = hash_(rs.get_state())[:6]\n self.hash_a = self.hash_d", - "docstring": "Compute hash of D and A timestamps for single-step D+A case." - }, - { - "code": "def filter_spouts(table, header):\n spouts_info = []\n for row in table:\n if row[0] == 'spout':\n spouts_info.append(row)\n return spouts_info, header", - "docstring": "filter to keep spouts" - }, - { - "code": "def adsPortCloseEx(port):\n port_close_ex = _adsDLL.AdsPortCloseEx\n port_close_ex.restype = ctypes.c_long\n error_code = port_close_ex(port)\n if error_code:\n raise ADSError(error_code)", - "docstring": "Close the connection to the TwinCAT message router." - }, - { - "code": "def register_metrics(self, metrics_collector, interval):\n for field, metrics in self.metrics.items():\n metrics_collector.register_metric(field, metrics, interval)", - "docstring": "Registers its metrics to a given metrics collector with a given interval" - }, - { - "code": "def enumerate_chunks (phrase, spacy_nlp):\n if (len(phrase) > 1):\n found = False\n text = \" \".join([rl.text for rl in phrase])\n doc = spacy_nlp(text.strip(), parse=True)\n for np in doc.noun_chunks:\n if np.text != text:\n found = True\n yield np.text, find_chunk(phrase, np.text.split(\" \"))\n if not found and all([rl.pos[0] != \"v\" for rl in phrase]):\n yield text, phrase", - "docstring": "iterate through the noun phrases" - }, - { - "code": "def intersection(self, i):\n if self.intersects(i):\n return Interval(max(self.start, i.start), min(self.end, i.end))\n else:\n return None", - "docstring": "If intervals intersect, returns their intersection, otherwise returns None" - }, - { - "code": "def argument_count(self):\n arguments, _ = self.argparser.parse_known_args()\n return self.count(**vars(arguments))", - "docstring": "Uses the command line arguments to fill the count function and call it."
- }, - { - "code": "async def _on_receive_array(self, array):\n if array[0] == 'noop':\n pass\n else:\n wrapper = json.loads(array[0]['p'])\n if '3' in wrapper:\n self._client_id = wrapper['3']['2']\n logger.info('Received new client_id: %r', self._client_id)\n await self._add_channel_services()\n if '2' in wrapper:\n pblite_message = json.loads(wrapper['2']['2'])\n if pblite_message[0] == 'cbu':\n batch_update = hangouts_pb2.BatchUpdate()\n pblite.decode(batch_update, pblite_message,\n ignore_first_item=True)\n for state_update in batch_update.state_update:\n logger.debug('Received StateUpdate:\\n%s', state_update)\n header = state_update.state_update_header\n self._active_client_state = header.active_client_state\n await self.on_state_update.fire(state_update)\n else:\n logger.info('Ignoring message: %r', pblite_message[0])", - "docstring": "Parse channel array and call the appropriate events." - }, - { - "code": "async def _now(self, ctx):\r\n player = self.bot.lavalink.players.get(ctx.guild.id)\r\n song = 'Nothing'\r\n if player.current:\r\n position = lavalink.Utils.format_time(player.position)\r\n if player.current.stream:\r\n duration = '🔴 LIVE'\r\n else:\r\n duration = lavalink.Utils.format_time(player.current.duration)\r\n song = f'**[{player.current.title}]({player.current.uri})**\\n({position}/{duration})'\r\n embed = discord.Embed(color=discord.Color.blurple(), title='Now Playing', description=song)\r\n await ctx.send(embed=embed)", - "docstring": "Shows some stats about the currently playing song." - }, - { - "code": "def count_params(self):\n n_params = 0\n for _i, p in enumerate(self.all_params):\n n = 1\n for s in p.get_shape():\n try:\n s = int(s)\n except Exception:\n s = 1\n if s:\n n = n * s\n n_params = n_params + n\n return n_params", - "docstring": "Returns the number of parameters in the network." - }, - { - "code": "def available_sources(sources):\n for dirs, name in sources:\n for directory in dirs:\n fn = os.path.join(directory, name) + '.py'\n if os.path.isfile(fn):\n yield fn", - "docstring": "Yield the sources that are present." - }, - { - "code": "def _top(self, n=0):\n if len(self.stack) - n < 0:\n raise StackUnderflow()\n return self.stack[n - 1]", - "docstring": "Read a value from the top of the stack without removing it" - }, - { - "code": "def doublewell(theta):\n k0, k1, depth = 0.01, 100, 0.5\n shallow = 0.5 * k0 * theta ** 2 + depth\n deep = 0.5 * k1 * theta ** 2\n obj = float(np.minimum(shallow, deep))\n grad = np.where(deep < shallow, k1 * theta, k0 * theta)\n return obj, grad", - "docstring": "Pointwise minimum of two quadratic bowls" - }, - { - "code": "def convert_html_subscripts_to_latex(text):\n text = re.sub(\"(.*?)\", r\"$_{\\1}$\", text)\n text = re.sub(\"(.*?)\", r\"$^{\\1}$\", text)\n return text", - "docstring": "Convert some HTML tags to latex equivalents." - }, - { - "code": "def write_bel_annotation(self, file: TextIO) -> None:\n if not self.is_populated():\n self.populate()\n values = self._get_namespace_name_to_encoding(desc='writing names')\n write_annotation(\n keyword=self._get_namespace_keyword(),\n citation_name=self._get_namespace_name(),\n description='',\n values=values,\n file=file,\n )", - "docstring": "Write as a BEL annotation file." 
- }, - { - "code": "def delete_password(self, service, username):\n if not self.connected(service):\n raise PasswordDeleteError(\"Cancelled by user\")\n if not self.iface.hasEntry(self.handle, service, username, self.appid):\n raise PasswordDeleteError(\"Password not found\")\n self.iface.removeEntry(self.handle, service, username, self.appid)", - "docstring": "Delete the password for the username of the service." - }, - { - "code": "def validate(schema_file=None, jams_files=None):\n schema = load_json(schema_file)\n for jams_file in jams_files:\n try:\n jams = load_json(jams_file)\n jsonschema.validate(jams, schema)\n print '{:s} was successfully validated'.format(jams_file)\n except jsonschema.ValidationError as exc:\n print '{:s} was NOT successfully validated'.format(jams_file)\n print exc", - "docstring": "Validate a jams file against a schema" - }, - { - "code": "def node(s, node, alpha=1.0):\n if s.depth:\n try: colors.shadow(dx=5, dy=5, blur=10, alpha=0.5*alpha)\n except: pass\n s._ctx.nofill()\n s._ctx.nostroke()\n if s.fill:\n s._ctx.fill(\n s.fill.r, \n s.fill.g, \n s.fill.b, \n s.fill.a * alpha\n )\n if s.stroke: \n s._ctx.strokewidth(s.strokewidth)\n s._ctx.stroke(\n s.stroke.r, \n s.stroke.g, \n s.stroke.b, \n s.stroke.a * alpha * 3\n )\n r = node.r\n s._ctx.oval(node.x-r, node.y-r, r*2, r*2)", - "docstring": "Visualization of a default node." - }, - { - "code": "def get(self, request, template_id, view_type):\n template = get_object_or_404(EnrollmentNotificationEmailTemplate, pk=template_id)\n if view_type not in self.view_type_contexts:\n return HttpResponse(status=404)\n base_context = self.view_type_contexts[view_type].copy()\n base_context.update({'user_name': self.get_user_name(request)})\n return HttpResponse(template.render_html_template(base_context), content_type='text/html')", - "docstring": "Render the given template with the stock data." - }, - { - "code": "def soft_equals(a, b):\n if isinstance(a, str) or isinstance(b, str):\n return str(a) == str(b)\n if isinstance(a, bool) or isinstance(b, bool):\n return bool(a) is bool(b)\n return a == b", - "docstring": "Implements the '==' operator, which does type JS-style coertion." - }, - { - "code": "def _unicode(string):\n for encoding in ['utf-8', 'latin1']:\n try:\n result = unicode(string, encoding)\n return result\n except UnicodeDecodeError:\n pass\n result = unicode(string, 'utf-8', 'replace')\n return result", - "docstring": "Try to convert a string to unicode using different encodings" - }, - { - "code": "def matrixValues(matrix,key):\n assert key in matrix.dtype.names\n col=matrix.dtype.names.index(key)\n values=np.empty(len(matrix))*np.nan\n for i in range(len(matrix)):\n values[i]=matrix[i][col]\n return values", - "docstring": "given a key, return a list of values from the matrix with that key." - }, - { - "code": "def union(self, iterable):\n _dict = self._dict\n append = self.append\n for i in iterable:\n if i.id not in _dict:\n append(i)", - "docstring": "adds elements with id's not already in the model" - }, - { - "code": "def clean(s):\n lines = [l.rstrip() for l in s.split('\\n')]\n return '\\n'.join(lines)", - "docstring": "Removes trailing whitespace on each line." 
- }, - { - "code": "def createEncoder():\n consumption_encoder = ScalarEncoder(21, 0.0, 100.0, n=50, name=\"consumption\",\n clipInput=True)\n time_encoder = DateEncoder(timeOfDay=(21, 9.5), name=\"timestamp_timeOfDay\")\n encoder = MultiEncoder()\n encoder.addEncoder(\"consumption\", consumption_encoder)\n encoder.addEncoder(\"timestamp\", time_encoder)\n return encoder", - "docstring": "Create the encoder instance for our test and return it." - }, - { - "code": "def print_summary(string='Repository', git_path=None):\n if git_path is None: git_path = GIT_PATH\n if not git_path_valid():\n print('\\n%s revision unknown (git not found).' % string)\n else:\n last_commit = get_last_commit_line()\n print('\\n{} revision:\\n {}\\n'.format(string, last_commit))\n if not check_clean_status():\n print('\\nWARNING -> Uncommitted changes:')\n print(get_status())", - "docstring": "Print the last commit line and eventual uncommitted changes." - }, - { - "code": "def _create_kernel(self):\n kernels = self.kernel_params\n if not isinstance(kernels, list):\n raise RuntimeError('Must provide enumeration of kernels')\n for kernel in kernels:\n if sorted(list(kernel.keys())) != ['name', 'options', 'params']:\n raise RuntimeError(\n 'strategy/params/kernels must contain keys: \"name\", \"options\", \"params\"')\n kernels = []\n for kern in self.kernel_params:\n params = kern['params']\n options = kern['options']\n name = kern['name']\n kernel_ep = load_entry_point(name, 'strategy/params/kernels')\n if issubclass(kernel_ep, KERNEL_BASE_CLASS):\n if options['independent']:\n kernel = np.sum([kernel_ep(1, active_dims=[i], **params) for i in range(self.n_dims)])\n else:\n kernel = kernel_ep(self.n_dims, **params)\n if not isinstance(kernel, KERNEL_BASE_CLASS):\n raise RuntimeError('strategy/params/kernel must load a'\n 'GPy derived Kernel')\n kernels.append(kernel)\n self.kernel = np.sum(kernels)", - "docstring": "creates an additive kernel" - }, - { - "code": "def import_demo_experience(self, states, internals, actions, terminal, reward):\n fetches = self.import_demo_experience_output\n feed_dict = self.get_feed_dict(\n states=states,\n internals=internals,\n actions=actions,\n terminal=terminal,\n reward=reward\n )\n self.monitored_session.run(fetches=fetches, feed_dict=feed_dict)", - "docstring": "Stores demonstrations in the demo memory." - }, - { - "code": "def scaffold():\n click.echo(\"A whole new site? Awesome.\")\n title = click.prompt(\"What's the title?\")\n url = click.prompt(\"Great. What's url? http://\")\n click.echo(\"Got it. Creating %s...\" % url)", - "docstring": "Start a new site." - }, - { - "code": "def toBigInt(byteArray):\n array = byteArray[::-1]\n out = 0\n for key, value in enumerate(array):\n decoded = struct.unpack(\"B\", bytes([value]))[0]\n out = out | decoded << key * 8\n return out", - "docstring": "Convert the byte array to a BigInteger" - }, - { - "code": "def _serialize_items(self, channel_metadata_items):\n return json.dumps(\n self._prepare_items_for_transmission(channel_metadata_items),\n sort_keys=True\n ).encode('utf-8')", - "docstring": "Serialize content metadata items for a create transmission to the integrated channel." 
- }, - { - "code": "def ingest_vectors(self, output_port_value):\n ingest_task = Task('IngestItemJsonToVectorServices')\n ingest_task.inputs.items = output_port_value\n ingest_task.impersonation_allowed = True\n stage_task = Task('StageDataToS3')\n stage_task.inputs.destination = 's3://{vector_ingest_bucket}/{recipe_id}/{run_id}/{task_name}'\n stage_task.inputs.data = ingest_task.outputs.result.value\n self.definition['tasks'].append(ingest_task.generate_task_workflow_json())\n self.definition['tasks'].append(stage_task.generate_task_workflow_json())", - "docstring": "append two required tasks to the given output to ingest to VS" - }, - { - "code": "def pprint(value):\n click.echo(\n json.dumps(value,\n sort_keys=True,\n indent=4,\n separators=(',', ': ')))", - "docstring": "Prints as formatted JSON" - }, - { - "code": "def decode(cls, phrase):\n phrase = phrase.split(\" \")\n out = \"\"\n for i in range(len(phrase) // 3):\n word1, word2, word3 = phrase[3*i:3*i+3]\n w1 = cls.word_list.index(word1)\n w2 = cls.word_list.index(word2) % cls.n\n w3 = cls.word_list.index(word3) % cls.n\n x = w1 + cls.n *((w2 - w1) % cls.n) + cls.n * cls.n * ((w3 - w2) % cls.n)\n out += endian_swap(\"%08x\" % x)\n return out", - "docstring": "Calculate hexadecimal representation of the phrase." - }, - { - "code": "def parse_option_settings(option_settings):\n ret = []\n for namespace, params in list(option_settings.items()):\n for key, value in list(params.items()):\n ret.append((namespace, key, value))\n return ret", - "docstring": "Parses option_settings as they are defined in the configuration file" - }, - { - "code": "def lcv(var, assignment, csp):\n \"Least-constraining-values heuristic.\"\n return sorted(csp.choices(var),\n key=lambda val: csp.nconflicts(var, val, assignment))", - "docstring": "Least-constraining-values heuristic." - }, - { - "code": "def fix_dashes(string):\n string = string.replace(u'\\u05BE', '-')\n string = string.replace(u'\\u1806', '-')\n string = string.replace(u'\\u2E3A', '-')\n string = string.replace(u'\\u2E3B', '-')\n string = unidecode(string)\n return re.sub(r'--+', '-', string)", - "docstring": "Fix bad Unicode special dashes in string." - }, - { - "code": "def confluence(ctx, no_publish=False, clean=False, opts=''):\n cfg = config.load()\n if clean:\n ctx.run(\"invoke clean --docs\")\n cmd = ['sphinx-build', '-b', 'confluence']\n cmd.extend(['-E', '-a'])\n if opts:\n cmd.append(opts)\n cmd.extend(['.', ctx.rituals.docs.build + '_cf'])\n if no_publish:\n cmd.extend(['-Dconfluence_publish=False'])\n notify.info(\"Starting Sphinx build...\")\n with pushd(ctx.rituals.docs.sources):\n ctx.run(' '.join(cmd), pty=True)", - "docstring": "Build Sphinx docs and publish to Confluence." - }, - { - "code": "def file_list(self):\n log.info('Listing files')\n res = self.__exchange(LIST_FILES)\n res = res.split('\\r\\n')\n res = res[1:-1]\n files = []\n for line in res:\n files.append(line.split('\\t'))\n return files", - "docstring": "list files on the device" - }, - { - "code": "def yaml_get_data(filename):\n with open(filename, 'rb') as fd:\n yaml_data = yaml.load(fd)\n return yaml_data\n return False", - "docstring": "Get data from .yml file" - }, - { - "code": "def labels(self):\n return sorted(self.channels, key=lambda c: self.channels[c])", - "docstring": "Return the names of our marker labels in canonical order." 
- }, - { - "code": "def write_usnps(data, sidx, pnames):\n tmparrs = os.path.join(data.dirs.outfiles, \"tmp-{}.h5\".format(data.name)) \n with h5py.File(tmparrs, 'r') as io5:\n bisarr = io5[\"bisarr\"]\n end = np.where(np.all(bisarr[:] == \"\", axis=0))[0]\n if np.any(end):\n end = end.min()\n else:\n end = bisarr.shape[1] \n with open(data.outfiles.usnpsphy, 'w') as out:\n out.write(\"{} {}\\n\".format(bisarr.shape[0], end))\n for idx, name in enumerate(pnames):\n out.write(\"{}{}\\n\".format(name, \"\".join(bisarr[idx, :end])))", - "docstring": "write the bisnp string" - }, - { - "code": "def copy(self, source, destination, recursive=False, use_sudo=False):\n func = use_sudo and run_as_root or self.run\n options = '-r ' if recursive else ''\n func('/bin/cp {0}{1} {2}'.format(options, quote(source), quote(destination)))", - "docstring": "Copy a file or directory" - }, - { - "code": "def clone_with_updates(self, **kwargs):\n fields_dict = self.to_dict()\n fields_dict.update(kwargs)\n return BindingPrediction(**fields_dict)", - "docstring": "Returns new BindingPrediction with updated fields" - }, - { - "code": "def run_server(self, port):\n try:\n self.server = MultiThreadedHTTPServer(('0.0.0.0', port), Handler)\n except socket.error, e:\n logger.error(str(e))\n sys.exit(1)\n logger.info(\"HTTP serve at http://0.0.0.0:%d (ctrl-c to stop) ...\"\n % port)\n try:\n self.server.serve_forever()\n except KeyboardInterrupt:\n logger.info(\"^C received, shutting down server\")\n self.shutdown_server()", - "docstring": "run a server binding to port" - }, - { - "code": "def python_value(self, value):\n value = super(OrderedUUIDField, self).python_value(value)\n u = binascii.b2a_hex(value)\n value = u[8:16] + u[4:8] + u[0:4] + u[16:22] + u[22:32]\n return UUID(value.decode())", - "docstring": "Convert binary blob to UUID instance" - }, - { - "code": "def add_edge(self, edge):\n \"Add edge to chart, and see if it extends or predicts another edge.\"\n start, end, lhs, found, expects = edge\n if edge not in self.chart[end]:\n self.chart[end].append(edge)\n if self.trace:\n print '%10s: added %s' % (caller(2), edge)\n if not expects:\n self.extender(edge)\n else:\n self.predictor(edge)", - "docstring": "Add edge to chart, and see if it extends or predicts another edge." - }, - { - "code": "def search(q, start=0, wait=10, asynchronous=False, cached=False):\n service = GOOGLE_SEARCH\n return GoogleSearch(q, start, service, \"\", wait, asynchronous, cached)", - "docstring": "Returns a Google web query formatted as a GoogleSearch list object." - }, - { - "code": "def _dump_knitting_pattern(self, file):\n knitting_pattern_set = self.__on_dump()\n knitting_pattern = knitting_pattern_set.patterns.at(0)\n layout = GridLayout(knitting_pattern)\n builder = AYABPNGBuilder(*layout.bounding_box)\n builder.set_colors_in_grid(layout.walk_instructions())\n builder.write_to_file(file)", - "docstring": "dump a knitting pattern to a file." - }, - { - "code": "def remove_edge(self, id1, id2):\n for e in list(self.edges):\n if id1 in (e.node1.id, e.node2.id) and \\\n id2 in (e.node1.id, e.node2.id):\n e.node1.links.remove(e.node2)\n e.node2.links.remove(e.node1)\n self.edges.remove(e)", - "docstring": "Remove edges between nodes with given id's." 
- }, - { - "code": "def _get_prepare_env(self, script, job_descriptor, inputs, outputs, mounts):\n docker_paths = sorted([\n var.docker_path if var.recursive else os.path.dirname(var.docker_path)\n for var in inputs | outputs | mounts\n if var.value\n ])\n env = {\n _SCRIPT_VARNAME: repr(script.value),\n _META_YAML_VARNAME: repr(job_descriptor.to_yaml()),\n 'DIR_COUNT': str(len(docker_paths))\n }\n for idx, path in enumerate(docker_paths):\n env['DIR_{}'.format(idx)] = os.path.join(providers_util.DATA_MOUNT_POINT,\n path)\n return env", - "docstring": "Return a dict with variables for the 'prepare' action." - }, - { - "code": "def _discover_via_entrypoints(self):\n emgr = extension.ExtensionManager(PLUGIN_EP, invoke_on_load=False)\n return ((ext.name, ext.plugin) for ext in emgr)", - "docstring": "Looks for modules with amtching entry points." - }, - { - "code": "def put(self, request, response):\n if self.slug is None:\n raise http.exceptions.NotImplemented()\n target = self.read()\n data = self._clean(target, self.request.read(deserialize=True))\n if target is not None:\n self.assert_operations('update')\n try:\n self.update(target, data)\n except AttributeError:\n raise http.exceptions.NotImplemented()\n self.make_response(target)\n else:\n self.assert_operations('create')\n target = self.create(data)\n self.response.status = http.client.CREATED\n self.make_response(target)", - "docstring": "Processes a `PUT` request." - }, - { - "code": "def _create_transmissions(self, content_metadata_item_map):\n ContentMetadataItemTransmission = apps.get_model(\n 'integrated_channel',\n 'ContentMetadataItemTransmission'\n )\n transmissions = []\n for content_id, channel_metadata in content_metadata_item_map.items():\n transmissions.append(\n ContentMetadataItemTransmission(\n enterprise_customer=self.enterprise_configuration.enterprise_customer,\n integrated_channel_code=self.enterprise_configuration.channel_code(),\n content_id=content_id,\n channel_metadata=channel_metadata\n )\n )\n ContentMetadataItemTransmission.objects.bulk_create(transmissions)", - "docstring": "Create ContentMetadataItemTransmision models for the given content metadata items." - }, - { - "code": "def stored_messages_archive(context, num_elements=10):\n if \"user\" in context:\n user = context[\"user\"]\n if user.is_authenticated():\n qs = MessageArchive.objects.select_related(\"message\").filter(user=user)\n return {\n \"messages\": qs[:num_elements],\n \"count\": qs.count(),\n }", - "docstring": "Renders a list of archived messages for the current user" - }, - { - "code": "def percept(self, agent):\n \"By default, agent perceives things within a default radius.\"\n return [self.thing_percept(thing, agent)\n for thing in self.things_near(agent.location)]", - "docstring": "By default, agent perceives things within a default radius." 
- }, - { - "code": "def __intermediate_dns_search(self, uci, address):\n if 'dns_search' in uci:\n return uci['dns_search']\n if address['proto'] == 'none':\n return None\n dns_search = self.netjson.get('dns_search', None)\n if dns_search:\n return ' '.join(dns_search)", - "docstring": "determines UCI interface \"dns_search\" option" - }, - { - "code": "def save(self):\n course_id = self.validated_data['course_id']\n __, created = models.EnterpriseCourseEnrollment.objects.get_or_create(\n enterprise_customer_user=self.enterprise_customer_user,\n course_id=course_id,\n )\n if created:\n track_enrollment('rest-api-enrollment', self.enterprise_customer_user.user_id, course_id)", - "docstring": "Save the model with the found EnterpriseCustomerUser." - }, - { - "code": "def post_track(session, user_id, project_id, latitude, longitude):\n tracking_data = {\n 'user_id': user_id,\n 'project_id': project_id,\n 'track_point': {\n 'latitude': latitude,\n 'longitude': longitude\n }\n }\n response = make_post_request(session, 'tracks',\n json_data=tracking_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['result']\n else:\n raise TrackNotCreatedException(message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Start tracking a project by creating a track" - }, - { - "code": "def _extension_module_tags():\n import sysconfig\n tags = []\n if six.PY2:\n multiarch = sysconfig.get_config_var('MULTIARCH')\n if multiarch is not None:\n tags.append(multiarch)\n else:\n tags.append(sysconfig.get_config_var('SOABI'))\n tags.append('abi3')\n tags = [t for t in tags if t]\n return tags", - "docstring": "Returns valid tags an extension module might have" - }, - { - "code": "def record_add_field(rec, tag, ind1='', ind2='', subfields=[],\n controlfield_value=''):\n if controlfield_value:\n doc = etree.Element(\"controlfield\",\n attrib={\n \"tag\": tag,\n })\n doc.text = unicode(controlfield_value)\n else:\n doc = etree.Element(\"datafield\",\n attrib={\n \"tag\": tag,\n \"ind1\": ind1,\n \"ind2\": ind2,\n })\n for code, value in subfields:\n field = etree.SubElement(doc, \"subfield\", attrib={\"code\": code})\n field.text = value\n rec.append(doc)\n return rec", - "docstring": "Add a MARCXML datafield as a new child to a XML document." - }, - { - "code": "def ternary_operation(x):\n g = tf.get_default_graph()\n with g.gradient_override_map({\"Sign\": \"Identity\"}):\n threshold = _compute_threshold(x)\n x = tf.sign(tf.add(tf.sign(tf.add(x, threshold)), tf.sign(tf.add(x, -threshold))))\n return x", - "docstring": "Ternary operation use threshold computed with weights." - }, - { - "code": "def append(self, key: str, value: str) -> None:\n append_key = key.lower().encode(\"latin-1\")\n append_value = value.encode(\"latin-1\")\n self._list.append((append_key, append_value))", - "docstring": "Append a header, preserving any duplicate entries." 
- }, - { - "code": "def delete_user_jobs(session, job_ids):\n jobs_data = {\n 'jobs[]': job_ids\n }\n response = make_delete_request(session, 'self/jobs', json_data=jobs_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['status']\n else:\n raise UserJobsNotDeletedException(\n message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Remove a list of jobs from the currently authenticated user" - }, - { - "code": "def guid(self, guid):\n return self._json(self._get(self._build_url('guids', guid)), 200)['data']['type']", - "docstring": "Determines JSONAPI type for provided GUID" - }, - { - "code": "def less(a, b, *args):\n types = set([type(a), type(b)])\n if float in types or int in types:\n try:\n a, b = float(a), float(b)\n except TypeError:\n return False\n return a < b and (not args or less(b, *args))", - "docstring": "Implements the '<' operator with JS-style type coertion." - }, - { - "code": "def resource(**kwargs):\n def inner(function):\n name = kwargs.pop('name', None)\n if name is None:\n name = utils.dasherize(function.__name__)\n methods = kwargs.pop('methods', None)\n if isinstance(methods, six.string_types):\n methods = methods,\n handler = (function, methods)\n if name not in _resources:\n _handlers[name] = []\n from armet import resources\n kwargs['name'] = name\n class LightweightResource(resources.Resource):\n Meta = type(str('Meta'), (), kwargs)\n def route(self, request, response):\n for handler, methods in _handlers[name]:\n if methods is None or request.method in methods:\n return handler(request, response)\n resources.Resource.route(self)\n _resources[name] = LightweightResource\n _handlers[name].append(handler)\n return _resources[name]\n return inner", - "docstring": "Wraps the decorated function in a lightweight resource." - }, - { - "code": "def _process_worker(process, process_tile):\n logger.debug((process_tile.id, \"running on %s\" % current_process().name))\n if (\n process.config.mode == \"continue\" and\n process.config.output.tiles_exist(process_tile)\n ):\n logger.debug((process_tile.id, \"tile exists, skipping\"))\n return ProcessInfo(\n tile=process_tile,\n processed=False,\n process_msg=\"output already exists\",\n written=False,\n write_msg=\"nothing written\"\n )\n else:\n with Timer() as t:\n try:\n output = process.execute(process_tile, raise_nodata=True)\n except MapcheteNodataTile:\n output = None\n processor_message = \"processed in %s\" % t\n logger.debug((process_tile.id, processor_message))\n writer_info = process.write(process_tile, output)\n return ProcessInfo(\n tile=process_tile,\n processed=True,\n process_msg=processor_message,\n written=writer_info.written,\n write_msg=writer_info.write_msg\n )", - "docstring": "Worker function running the process." 
- }, - { - "code": "def _encode(self, data, algorithm, key=None):\n if algorithm['type'] == 'hmac':\n return data + self._hmac_generate(data, algorithm, key)\n elif algorithm['type'] == 'aes':\n return self._aes_encrypt(data, algorithm, key)\n elif algorithm['type'] == 'no-serialization':\n return data\n elif algorithm['type'] == 'json':\n return json.dumps(data)\n elif algorithm['type'] == 'no-compression':\n return data\n elif algorithm['type'] == 'gzip':\n return self._zlib_compress(data, algorithm)\n else:\n raise Exception('Algorithm not supported: %s' % algorithm['type'])", - "docstring": "Encode data with specific algorithm" - }, - { - "code": "def delete_connection():\n if _CON_SYM_ in globals():\n con = globals().pop(_CON_SYM_)\n if not getattr(con, '_session').start(): con.stop()", - "docstring": "Stop and destroy Bloomberg connection" - }, - { - "code": "def __crawl(self, crawl_candidate):\n def crawler_wrapper(parser, parsers_lst, crawl_candidate):\n try:\n crawler = Crawler(self.config, self.fetcher)\n article = crawler.crawl(crawl_candidate)\n except (UnicodeDecodeError, ValueError) as ex:\n if parsers_lst:\n parser = parsers_lst.pop(0)\n return crawler_wrapper(parser, parsers_lst, crawl_candidate)\n else:\n raise ex\n return article\n parsers = list(self.config.available_parsers)\n parsers.remove(self.config.parser_class)\n return crawler_wrapper(self.config.parser_class, parsers, crawl_candidate)", - "docstring": "wrap the crawling functionality" - }, - { - "code": "def _angle(self):\n from math import atan, pi, degrees\n a = degrees(atan(self.vy/self.vx)) + 360\n if self.vx < 0: a += 180\n return a", - "docstring": "Returns the angle towards which the boid is steering." - }, - { - "code": "def _make_prefix(self):\n fromprefix = \"from%d_\" % HtmlDiff._default_prefix\n toprefix = \"to%d_\" % HtmlDiff._default_prefix\n HtmlDiff._default_prefix += 1\n self._prefix = [fromprefix,toprefix]", - "docstring": "Create unique anchor prefixes" - }, - { - "code": "def validate(self, data):\n lms_user_id = data.get('lms_user_id')\n tpa_user_id = data.get('tpa_user_id')\n user_email = data.get('user_email')\n if not lms_user_id and not tpa_user_id and not user_email:\n raise serializers.ValidationError(\n 'At least one of the following fields must be specified and map to an EnterpriseCustomerUser: '\n 'lms_user_id, tpa_user_id, user_email'\n )\n return data", - "docstring": "Validate that at least one of the user identifier fields has been passed in." - }, - { - "code": "def _get_error(self, stanza):\n if stanza:\n logger.debug(u\"Roster request failed: {0}\".format(\n stanza.error.condition_name))\n else:\n logger.debug(u\"Roster request failed: timeout\")\n self._event_queue.put(RosterNotReceivedEvent(self, stanza))", - "docstring": "Handle failure of the roster request." 
- }, - { - "code": "def init_repo(self, gitdir):\n hooksdir = os.path.join(gitdir, 'hooks')\n content = postreceive_template % {\n 'client': self.client,\n 'bucket': self.bucket,\n 's3cfg': self.s3cfg,\n 'prefix': self.prefix\n }\n postrecv_filename =os.path.join(hooksdir, 'post-receive')\n with open(postrecv_filename,'w') as fd:\n fd.write(content)\n self.make_hook_executable(postrecv_filename)\n print(\"Wrote to\", postrecv_filename)", - "docstring": "Insert hook into the repo" - }, - { - "code": "def _fill_pattern_collection(self, pattern_collection, values):\n pattern = values.get(PATTERNS, [])\n for pattern_to_parse in pattern:\n parsed_pattern = self._pattern(pattern_to_parse)\n pattern_collection.append(parsed_pattern)", - "docstring": "Fill a pattern collection." - }, - { - "code": "def hex_escape(bin_str):\n printable = string.ascii_letters + string.digits + string.punctuation + ' '\n return ''.join(ch if ch in printable else r'0x{0:02x}'.format(ord(ch)) for ch in bin_str)", - "docstring": "Hex encode a binary string" - }, - { - "code": "def glob2re(part):\n return \"[^/]*\".join(\n re.escape(bit).replace(r'\\[\\^', '[^').replace(r'\\[', '[').replace(r'\\]', ']')\n for bit in part.split(\"*\")\n )", - "docstring": "Convert a path part to regex syntax." - }, - { - "code": "def _check_limit(self, event):\n if self.count(event) > self.max_listeners:\n warnings.warn(\n 'Too many listeners for event {}'.format(event),\n ResourceWarning,\n )", - "docstring": "Check if the listener limit is hit and warn if needed." - }, - { - "code": "def requireAnomalyModel(func):\n @wraps(func)\n def _decorator(self, *args, **kwargs):\n if not self.getInferenceType() == InferenceType.TemporalAnomaly:\n raise RuntimeError(\"Method required a TemporalAnomaly model.\")\n if self._getAnomalyClassifier() is None:\n raise RuntimeError(\"Model does not support this command. Model must\"\n \"be an active anomalyDetector model.\")\n return func(self, *args, **kwargs)\n return _decorator", - "docstring": "Decorator for functions that require anomaly models." - }, - { - "code": "def count_confidences(graph: BELGraph) -> typing.Counter[str]:\n return Counter(\n (\n 'None'\n if ANNOTATIONS not in data or 'Confidence' not in data[ANNOTATIONS] else\n list(data[ANNOTATIONS]['Confidence'])[0]\n )\n for _, _, data in graph.edges(data=True)\n if CITATION in data\n )", - "docstring": "Count the confidences in the graph." - }, - { - "code": "def _fly(self, board, layers, things, the_plot):\n if self.character in the_plot['bunker_hitters']:\n return self._teleport((-1, -1))\n if self.position == things['P'].position: the_plot.terminate_episode()\n self._south(board, the_plot)", - "docstring": "Handles the behaviour of visible bolts flying toward the player." 
- }, - { - "code": "def add_files(self, repo, files):\n rootdir = repo.rootdir\n for f in files:\n relativepath = f['relativepath']\n sourcepath = f['localfullpath']\n if sourcepath is None:\n continue\n targetpath = os.path.join(rootdir, relativepath)\n try:\n os.makedirs(os.path.dirname(targetpath))\n except:\n pass\n print(\"Updating: {}\".format(relativepath))\n shutil.copyfile(sourcepath, targetpath)\n with cd(repo.rootdir):\n self._run(['add', relativepath])", - "docstring": "Add files to the repo" - }, - { - "code": "def update_function(self, param_vals):\n self.model = self.func(param_vals, *self.func_args, **self.func_kwargs)\n d = self.calc_residuals()\n return np.dot(d.flat, d.flat)", - "docstring": "Takes an array param_vals, updates function, returns the new error" - }, - { - "code": "def supported_versions(django, cms):\n cms_version = None\n django_version = None\n try:\n cms_version = Decimal(cms)\n except (ValueError, InvalidOperation):\n try:\n cms_version = CMS_VERSION_MATRIX[str(cms)]\n except KeyError:\n pass\n try:\n django_version = Decimal(django)\n except (ValueError, InvalidOperation):\n try:\n django_version = DJANGO_VERSION_MATRIX[str(django)]\n except KeyError:\n pass\n try:\n if (\n cms_version and django_version and\n not (LooseVersion(VERSION_MATRIX[compat.unicode(cms_version)][0]) <=\n LooseVersion(compat.unicode(django_version)) <=\n LooseVersion(VERSION_MATRIX[compat.unicode(cms_version)][1]))\n ):\n raise RuntimeError(\n 'Django and django CMS versions doesn\\'t match: '\n 'Django {0} is not supported by django CMS {1}'.format(django_version, cms_version)\n )\n except KeyError:\n raise RuntimeError(\n 'Django and django CMS versions doesn\\'t match: '\n 'Django {0} is not supported by django CMS {1}'.format(django_version, cms_version)\n )\n return (\n compat.unicode(django_version) if django_version else django_version,\n compat.unicode(cms_version) if cms_version else cms_version\n )", - "docstring": "Convert numeric and literal version information to numeric format" - }, - { - "code": "def update_isbn(self):\n isbns = record_get_field_instances(self.record, '020')\n for field in isbns:\n for idx, (key, value) in enumerate(field[0]):\n if key == 'a':\n field[0][idx] = ('a', value.replace(\"-\", \"\").strip())", - "docstring": "Remove dashes from ISBN." - }, - { - "code": "def _add_install(self, context):\n contents = self._render_template('install.sh', context)\n self.config.setdefault('files', [])\n self._add_unique_file({\n \"path\": \"/install.sh\",\n \"contents\": contents,\n \"mode\": \"755\"\n })", - "docstring": "generates install.sh and adds it to included files" - }, - { - "code": "def handle_copy(self, dest_path, depth_infinity):\n destType, destHgPath = util.pop_path(dest_path)\n destHgPath = destHgPath.strip(\"/\")\n ui = self.provider.ui\n repo = self.provider.repo\n _logger.info(\"handle_copy %s -> %s\" % (self.localHgPath, destHgPath))\n if self.rev is None and destType == \"edit\":\n commands.copy(ui, repo, self.localHgPath, destHgPath, force=True)\n elif self.rev is None and destType == \"released\":\n self._commit(\"WsgiDAV commit (COPY %s -> %s)\" % (self.path, dest_path))\n else:\n raise DAVError(HTTP_FORBIDDEN)\n return True", - "docstring": "Handle a COPY request natively." 
- }, - { - "code": "def intervals(annotation, **kwargs):\n times, labels = annotation.to_interval_values()\n return mir_eval.display.labeled_intervals(times, labels, **kwargs)", - "docstring": "Plotting wrapper for labeled intervals" - }, - { - "code": "def print_big_file(self, top_n=5):\n self.assert_is_dir_and_exists()\n size_table = sorted(\n [(p, p.size) for p in self.select_file(recursive=True)],\n key=lambda x: x[1],\n reverse=True,\n )\n for p, size in size_table[:top_n]:\n print(\"{:<9} {:<9}\".format(repr_data_size(size), p.abspath))", - "docstring": "Print ``top_n`` big file in this dir." - }, - { - "code": "def render(self, *args, **kwargs):\n render_to = StringIO()\n self.output(render_to, *args, **kwargs)\n return render_to.getvalue()", - "docstring": "Renders as a str" - }, - { - "code": "def __write(self, containers, initialize=True):\n path = self._state_file\n self._assure_dir()\n try:\n flags = os.O_WRONLY | os.O_CREAT\n if initialize:\n flags |= os.O_EXCL\n with os.fdopen(os.open(path, flags), \"w\") as f:\n yaml.safe_dump(self.__base_state(containers), f)\n except OSError as err:\n if err.errno == errno.EEXIST:\n raise AlreadyInitializedError(\n \"Path %s exists. \"\n \"You may need to destroy a previous blockade.\" % path)\n raise\n except Exception:\n self._state_delete()\n raise", - "docstring": "Write the given state information into a file" - }, - { - "code": "def swap_buffers(self):\n self.frames += 1\n glfw.swap_buffers(self.window)\n self.poll_events()", - "docstring": "Swaps buffers, increments the framecounter and polls events." - }, - { - "code": "def restore_ipython(self):\n if not self.is_ipysetup:\n return\n shell_class = type(self.shell)\n shell_class.showtraceback = shell_class.default_showtraceback\n del shell_class.default_showtraceback\n self.is_ipysetup = False", - "docstring": "Restore default IPython showtraceback" - }, - { - "code": "def encode_basestring(s):\n def replace(match):\n return ESCAPE_DCT[match.group(0)]\n return '\"' + ESCAPE.sub(replace, s) + '\"'", - "docstring": "Return a JSON representation of a Python string" - }, - { - "code": "def revoke_token(self, token, callback):\n yield Task(self.data_store.remove, 'tokens', token=token)\n callback()", - "docstring": "revoke_token removes the access token from the data_store" - }, - { - "code": "def print_line(text):\n try:\n signal.signal(signal.SIGPIPE, signal.SIG_DFL)\n except ValueError:\n pass\n try:\n sys.stdout.write(text)\n if not text.endswith('\\n'):\n sys.stdout.write('\\n')\n sys.stdout.flush()\n except IOError:\n sys.exit(0)", - "docstring": "Print the given line to stdout" - }, - { - "code": "def _SendRecv():\n port = int(os.getenv(DEVSHELL_ENV, 0))\n if port == 0:\n raise NoDevshellServer()\n sock = socket.socket()\n sock.connect(('localhost', port))\n data = CREDENTIAL_INFO_REQUEST_JSON\n msg = '{0}\\n{1}'.format(len(data), data)\n sock.sendall(_helpers._to_bytes(msg, encoding='utf-8'))\n header = sock.recv(6).decode()\n if '\\n' not in header:\n raise CommunicationError('saw no newline in the first 6 bytes')\n len_str, json_str = header.split('\\n', 1)\n to_read = int(len_str) - len(json_str)\n if to_read > 0:\n json_str += sock.recv(to_read, socket.MSG_WAITALL).decode()\n return CredentialInfoResponse(json_str)", - "docstring": "Communicate with the Developer Shell server socket." 
- }, - { - "code": "def module(self):\n if not hasattr(self, '_module'):\n if \"__main__\" in sys.modules:\n mod = sys.modules[\"__main__\"]\n path = self.normalize_path(mod.__file__)\n if os.path.splitext(path) == os.path.splitext(self.path):\n self._module = mod\n else:\n self._module = imp.load_source('captain_script', self.path)\n return self._module", - "docstring": "load the module so we can actually run the script's function" - }, - { - "code": "def createSensorToClassifierLinks(network, sensorRegionName,\n classifierRegionName):\n network.link(sensorRegionName, classifierRegionName, \"UniformLink\", \"\",\n srcOutput=\"bucketIdxOut\", destInput=\"bucketIdxIn\")\n network.link(sensorRegionName, classifierRegionName, \"UniformLink\", \"\",\n srcOutput=\"actValueOut\", destInput=\"actValueIn\")\n network.link(sensorRegionName, classifierRegionName, \"UniformLink\", \"\",\n srcOutput=\"categoryOut\", destInput=\"categoryIn\")", - "docstring": "Create required links from a sensor region to a classifier region." - }, - { - "code": "def handle_change(self, change):\n op = change['operation']\n if op in 'append':\n self.add(len(change['value']), LatLng(*change['item']))\n elif op == 'insert':\n self.add(change['index'], LatLng(*change['item']))\n elif op == 'extend':\n points = [LatLng(*p) for p in change['items']]\n self.addAll([bridge.encode(c) for c in points])\n elif op == '__setitem__':\n self.set(change['index'], LatLng(*change['newitem']))\n elif op == 'pop':\n self.remove(change['index'])\n else:\n raise NotImplementedError(\n \"Unsupported change operation {}\".format(op))", - "docstring": "Handle changes from atom ContainerLists" - }, - { - "code": "def _stream_data_chunked(self, environ, block_size):\n if \"Darwin\" in environ.get(\"HTTP_USER_AGENT\", \"\") and environ.get(\n \"HTTP_X_EXPECTED_ENTITY_LENGTH\"\n ):\n WORKAROUND_CHUNK_LENGTH = True\n buf = environ.get(\"HTTP_X_EXPECTED_ENTITY_LENGTH\", \"0\")\n length = int(buf)\n else:\n WORKAROUND_CHUNK_LENGTH = False\n buf = environ[\"wsgi.input\"].readline()\n environ[\"wsgidav.some_input_read\"] = 1\n if buf == compat.b_empty:\n length = 0\n else:\n length = int(buf, 16)\n while length > 0:\n buf = environ[\"wsgi.input\"].read(block_size)\n yield buf\n if WORKAROUND_CHUNK_LENGTH:\n environ[\"wsgidav.some_input_read\"] = 1\n if buf == compat.b_empty:\n length = 0\n else:\n length -= len(buf)\n else:\n environ[\"wsgi.input\"].readline()\n buf = environ[\"wsgi.input\"].readline()\n if buf == compat.b_empty:\n length = 0\n else:\n length = int(buf, 16)\n environ[\"wsgidav.all_input_read\"] = 1", - "docstring": "Get the data from a chunked transfer." - }, - { - "code": "def _run_io_threads(self, handler):\n reader = ReadingThread(self.settings, handler, daemon = self.daemon,\n exc_queue = self.exc_queue)\n writter = WrittingThread(self.settings, handler, daemon = self.daemon,\n exc_queue = self.exc_queue)\n self.io_threads += [reader, writter]\n reader.start()\n writter.start()", - "docstring": "Start threads for an IOHandler." 
- }, - { - "code": "def from_spec(spec, kwargs=None):\n if isinstance(spec, dict):\n spec = [spec]\n stack = PreprocessorStack()\n for preprocessor_spec in spec:\n preprocessor_kwargs = copy.deepcopy(kwargs)\n preprocessor = util.get_object(\n obj=preprocessor_spec,\n predefined_objects=tensorforce.core.preprocessors.preprocessors,\n kwargs=preprocessor_kwargs\n )\n assert isinstance(preprocessor, Preprocessor)\n stack.preprocessors.append(preprocessor)\n return stack", - "docstring": "Creates a preprocessing stack from a specification dict." - }, - { - "code": "def _proc_async_iter_stream(proc, stream, buffersize=1):\n from six.moves import queue\n from threading import Thread\n def enqueue_output(proc, stream, stream_queue):\n while proc.poll() is None:\n line = stream.readline()\n stream_queue.put(line)\n for line in _textio_iterlines(stream):\n stream_queue.put(line)\n stream_queue.put(None)\n stream_queue = queue.Queue(maxsize=buffersize)\n _thread = Thread(target=enqueue_output, args=(proc, stream, stream_queue))\n _thread.daemon = True\n _thread.start()\n return stream_queue", - "docstring": "Reads output from a process in a separate thread" - }, - { - "code": "def to_utf8(y):\n out = []\n for x in y:\n if x < 0x080:\n out.append(x)\n elif x < 0x0800:\n out.append((x >> 6) | 0xC0)\n out.append((x & 0x3F) | 0x80)\n elif x < 0x10000:\n out.append((x >> 12) | 0xE0)\n out.append(((x >> 6) & 0x3F) | 0x80)\n out.append((x & 0x3F) | 0x80)\n else:\n out.append((x >> 18) | 0xF0)\n out.append(((x >> 12) & 0x3F) | 0x80)\n out.append(((x >> 6) & 0x3F) | 0x80)\n out.append((x & 0x3F) | 0x80)\n return ''.join(map(chr, out))", - "docstring": "converts an array of integers to utf8 string" - }, - { - "code": "def disconnect(self):\n logger.debug(\"TCPTransport.disconnect()\")\n with self.lock:\n if self._socket is None:\n if self._state != \"closed\":\n self.event(DisconnectedEvent(self._dst_addr))\n self._set_state(\"closed\")\n return\n if self._hup or not self._serializer:\n self._close()\n else:\n self.send_stream_tail()", - "docstring": "Disconnect the stream gracefully." - }, - { - "code": "def filter_queryset(self, request, queryset, view):\n if not request.user.is_staff:\n filter_kwargs = {view.USER_ID_FILTER: request.user.id}\n queryset = queryset.filter(**filter_kwargs)\n return queryset", - "docstring": "Filter only for the user's ID if non-staff." 
- }, - { - "code": "def _get_spout(self):\n spout = topology_pb2.Spout()\n spout.comp.CopyFrom(self._get_base_component())\n self._add_out_streams(spout)\n return spout", - "docstring": "Returns Spout protobuf message" - }, - { - "code": "def write_dot (graph, ranks, path=\"graph.dot\"):\n dot = Digraph()\n for node in graph.nodes():\n dot.node(node, \"%s %0.3f\" % (node, ranks[node]))\n for edge in graph.edges():\n dot.edge(edge[0], edge[1], constraint=\"false\")\n with open(path, 'w') as f:\n f.write(dot.source)", - "docstring": "output the graph in Dot file format" - }, - { - "code": "def _create_session(self, scope):\n now = datetime.datetime.utcnow()\n if self.session is None or self.expires_at is None or now >= self.expires_at:\n if self.session:\n self.session.close()\n oauth_access_token, expires_at = self._get_oauth_access_token(\n self.enterprise_configuration.key,\n self.enterprise_configuration.secret,\n self.enterprise_configuration.degreed_user_id,\n self.enterprise_configuration.degreed_user_password,\n scope\n )\n session = requests.Session()\n session.timeout = self.SESSION_TIMEOUT\n session.headers['Authorization'] = 'Bearer {}'.format(oauth_access_token)\n session.headers['content-type'] = 'application/json'\n self.session = session\n self.expires_at = expires_at", - "docstring": "Instantiate a new session object for use in connecting with Degreed" - }, - { - "code": "def transform_courserun_description(self, content_metadata_item):\n description_with_locales = []\n content_metadata_language_code = transform_language_code(content_metadata_item.get('content_language', ''))\n for locale in self.enterprise_configuration.get_locales(default_locale=content_metadata_language_code):\n description_with_locales.append({\n 'locale': locale,\n 'value': (\n content_metadata_item['full_description'] or\n content_metadata_item['short_description'] or\n content_metadata_item['title'] or\n ''\n )\n })\n return description_with_locales", - "docstring": "Return the description of the courserun content item." - }, - { - "code": "def rsolve(A, y):\n from numpy_sugar.linalg import rsolve as _rsolve\n try:\n beta = _rsolve(A, y)\n except LinAlgError:\n msg = \"Could not converge to solve Ax=y.\"\n msg += \" Setting x to zero.\"\n warnings.warn(msg, RuntimeWarning)\n beta = zeros(A.shape[0])\n return beta", - "docstring": "Robust solve Ax=y." - }, - { - "code": "def search_news(q, start=1, count=10, wait=10, asynchronous=False, cached=False):\n service = YAHOO_NEWS\n return YahooSearch(q, start, count, service, None, wait, asynchronous, cached)", - "docstring": "Returns a Yahoo news query formatted as a YahooSearch list object." - }, - { - "code": "def enterprise_customer_uuid(self):\n try:\n enterprise_user = EnterpriseCustomerUser.objects.get(user_id=self.user.id)\n except ObjectDoesNotExist:\n LOGGER.warning(\n 'User {} has a {} assignment but is not linked to an enterprise!'.format(\n self.user.id,\n self.__class__\n ))\n return None\n except MultipleObjectsReturned:\n LOGGER.warning(\n 'User {} is linked to multiple enterprises, which is not yet supported!'.format(self.user.id)\n )\n return None\n return str(enterprise_user.enterprise_customer.uuid)", - "docstring": "Get the enterprise customer uuid linked to the user." 
- }, - { - "code": "def finalize(self, result=None):\n if not self.settings_path:\n return\n from django.test.utils import teardown_test_environment\n from django.db import connection\n from django.conf import settings\n self.call_plugins_method('beforeDestroyTestDb', settings, connection)\n try:\n connection.creation.destroy_test_db(\n self.old_db,\n verbosity=self.verbosity,\n )\n except Exception:\n pass\n self.call_plugins_method('afterDestroyTestDb', settings, connection)\n self.call_plugins_method(\n 'beforeTeardownTestEnv', settings, teardown_test_environment)\n teardown_test_environment()\n self.call_plugins_method('afterTeardownTestEnv', settings)", - "docstring": "Clean up any created database and schema." - }, - { - "code": "def sendToLogbook(self, fileName, logType, location=None):\n import subprocess\n success = True\n if logType == \"MCC\":\n fileString = \"\"\n if not self.imagePixmap.isNull():\n fileString = fileName + \".\" + self.imageType\n logcmd = \"xml2elog \" + fileName + \".xml \" + fileString\n process = subprocess.Popen(logcmd, shell=True)\n process.wait()\n if process.returncode != 0:\n success = False\n else:\n from shutil import copy\n path = \"/u1/\" + location.lower() + \"/physics/logbook/data/\"\n try:\n if not self.imagePixmap.isNull():\n copy(fileName + \".png\", path)\n if self.imageType == \"png\":\n copy(fileName + \".ps\", path)\n else:\n copy(fileName + \".\" + self.imageType, path)\n copy(fileName + \".xml\", path)\n except IOError as error:\n print(error)\n success = False\n return success", - "docstring": "Process log information and push to selected logbooks." - }, - { - "code": "def create_conjunction_node(self, conjunction):\n node = BNode()\n type_triple = (node, RDF.type, self.spdx_namespace.ConjunctiveLicenseSet)\n self.graph.add(type_triple)\n licenses = self.licenses_from_tree(conjunction)\n for lic in licenses:\n member_triple = (node, self.spdx_namespace.member, lic)\n self.graph.add(member_triple)\n return node", - "docstring": "Return a node representing a conjunction of licenses." - }, - { - "code": "def p_file_notice(self, f_term, predicate):\n try:\n for _, _, notice in self.graph.triples((f_term, predicate, None)):\n self.builder.set_file_notice(self.doc, six.text_type(notice))\n except CardinalityError:\n self.more_than_one_error('file notice')", - "docstring": "Sets file notice text." - }, - { - "code": "def remove(group_id, user_id):\n group = Group.query.get_or_404(group_id)\n user = User.query.get_or_404(user_id)\n if group.can_edit(current_user):\n try:\n group.remove_member(user)\n except Exception as e:\n flash(str(e), \"error\")\n return redirect(urlparse(request.referrer).path)\n flash(_('User %(user_email)s was removed from %(group_name)s group.',\n user_email=user.email, group_name=group.name), 'success')\n return redirect(urlparse(request.referrer).path)\n flash(\n _(\n 'You cannot delete users of the group %(group_name)s',\n group_name=group.name\n ),\n 'error'\n )\n return redirect(url_for('.index'))", - "docstring": "Remove user from a group." 
- }, - { - "code": "def env():\n if cij.ssh.env():\n cij.err(\"cij.block.env: invalid SSH environment\")\n return 1\n block = cij.env_to_dict(PREFIX, REQUIRED)\n block[\"DEV_PATH\"] = \"/dev/%s\" % block[\"DEV_NAME\"]\n cij.env_export(PREFIX, EXPORTED, block)\n return 0", - "docstring": "Verify BLOCK variables and construct exported variables" - }, - { - "code": "def highlight_project_bid(session, bid_id):\n headers = {\n 'Content-Type': 'application/x-www-form-urlencoded'\n }\n bid_data = {\n 'action': 'highlight'\n }\n endpoint = 'bids/{}'.format(bid_id)\n response = make_put_request(session, endpoint, headers=headers,\n params_data=bid_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['status']\n else:\n json_data = response.json()\n raise BidNotHighlightedException(message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Highlight a bid on a project" - }, - { - "code": "def _set_scores(self):\n anom_scores = {}\n self._compute_derivatives()\n derivatives_ema = utils.compute_ema(self.smoothing_factor, self.derivatives)\n for i, (timestamp, value) in enumerate(self.time_series_items):\n anom_scores[timestamp] = abs(self.derivatives[i] - derivatives_ema[i])\n stdev = numpy.std(anom_scores.values())\n if stdev:\n for timestamp in anom_scores.keys():\n anom_scores[timestamp] /= stdev\n self.anom_scores = TimeSeries(self._denoise_scores(anom_scores))", - "docstring": "Compute anomaly scores for the time series." - }, - { - "code": "def column(self):\n line, column = self.source_buffer.decompose_position(self.begin_pos)\n return column", - "docstring": "Returns a zero-based column number of the beginning of this range." - }, - { - "code": "def sound_touch_stop(self, call_params):\n path = '/' + self.api_version + '/SoundTouchStop/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Remove soundtouch audio effects on a Call" - }, - { - "code": "async def await_event(self, event=None, timeout=None):\n if self.event_future is not None:\n raise Exception(\"Can't wait on multiple events!\")\n result = await asyncio.wait_for(self._wait_loop(event), timeout)\n return result", - "docstring": "Wait for any or specified event" - }, - { - "code": "def build_expand_node_neighborhood_by_hash(manager: Manager) -> Callable[[BELGraph, BELGraph, str], None]:\n @uni_in_place_transformation\n def expand_node_neighborhood_by_hash(universe: BELGraph, graph: BELGraph, node_hash: str) -> None:\n node = manager.get_dsl_by_hash(node_hash)\n return expand_node_neighborhood(universe, graph, node)\n return expand_node_neighborhood_by_hash", - "docstring": "Make an expand function that's bound to the manager." - }, - { - "code": "def reverse(self):\n colors = ColorList.copy(self)\n _list.reverse(colors)\n return colors", - "docstring": "Returns a reversed copy of the list." - }, - { - "code": "def paste(tid=None, review=False):\n submit(pastebin=True, tid=tid, review=False)", - "docstring": "Sends the selected exercise to the TMC pastebin." - }, - { - "code": "def sql(self, sql):\n self._cur.execute(sql)\n if sql.lower().find(\"select\") >= 0:\n matches = []\n for r in self._cur: matches.append(r)\n return matches", - "docstring": "Executes a raw SQL statement on the database." 
- }, - { - "code": "def _totals(self, query):\n self.add_parameters(limit=1)\n query = self._build_query(query)\n self._retrieve_data(query)\n self.url_params = None\n return int(self.request.headers[\"Total-Results\"])", - "docstring": "General method for returning total counts" - }, - { - "code": "def copy(self):\n return ColorList(\n [color(clr.r, clr.g, clr.b, clr.a, mode=\"rgb\") for clr in self],\n name=self.name,\n tags=self.tags\n )", - "docstring": "Returns a deep copy of the list." - }, - { - "code": "def set(self, target, value):\n if not self._set:\n return\n if self.path is None:\n self.set = lambda *a: None\n return None\n if self._segments[target.__class__]:\n self.get(target)\n if self._segments[target.__class__]:\n return\n parent_getter = compose(*self._getters[target.__class__][:-1])\n target = parent_getter(target)\n func = self._make_setter(self.path.split('.')[-1], target.__class__)\n func(target, value)\n def setter(target, value):\n func(parent_getter(target), value)\n self.set = setter", - "docstring": "Set the value of this attribute for the passed object." - }, - { - "code": "def new_psf_with_renormalized_array(self):\n return PSF(array=self, pixel_scale=self.pixel_scale, renormalize=True)", - "docstring": "Renormalize the PSF such that its data_vector values sum to unity." - }, - { - "code": "def bitsToString(arr):\n s = array('c','.'*len(arr))\n for i in xrange(len(arr)):\n if arr[i] == 1:\n s[i]='*'\n return s", - "docstring": "Returns a string representing a numpy array of 0's and 1's" - }, - { - "code": "def ready(self):\n from enterprise.signals import handle_user_post_save\n from django.db.models.signals import pre_migrate, post_save\n post_save.connect(handle_user_post_save, sender=self.auth_user_model, dispatch_uid=USER_POST_SAVE_DISPATCH_UID)\n pre_migrate.connect(self._disconnect_user_post_save_for_migrations)", - "docstring": "Perform other one-time initialization steps." - }, - { - "code": "def process_docstring(app, what, name, obj, options, lines):\n markdown = \"\\n\".join(lines)\n rest = m2r(markdown)\n rest = rest.replace(\"\\r\\n\", \"\\n\")\n del lines[:]\n lines.extend(rest.split(\"\\n\"))", - "docstring": "Enable markdown syntax in docstrings" - }, - { - "code": "def Tok(kind, loc=None):\n @llrule(loc, lambda parser: [kind])\n def rule(parser):\n return parser._accept(kind)\n return rule", - "docstring": "A rule that accepts a token of kind ``kind`` and returns it, or returns None." - }, - { - "code": "def transform_args(self, *args, **kwargs):\n options = []\n for option,value in kwargs.items():\n if not option.startswith('-'):\n if len(option) == 1:\n option = '-' + option\n else:\n option = '--' + option\n if value is True:\n options.append(option)\n continue\n elif value is False:\n raise ValueError('A False value is ambiguous for option {0!r}'.format(option))\n if option[:2] == '--':\n options.append(option + '=' + str(value))\n else:\n options.extend((option, str(value)))\n return options + list(args)", - "docstring": "Transform arguments and return them as a list suitable for Popen." 
- }, - { - "code": "async def _on_push_data(self, data_bytes):\n logger.debug('Received chunk:\\n{}'.format(data_bytes))\n for chunk in self._chunk_parser.get_chunks(data_bytes):\n if not self._is_connected:\n if self._on_connect_called:\n self._is_connected = True\n await self.on_reconnect.fire()\n else:\n self._on_connect_called = True\n self._is_connected = True\n await self.on_connect.fire()\n container_array = json.loads(chunk)\n for inner_array in container_array:\n array_id, data_array = inner_array\n logger.debug('Chunk contains data array with id %r:\\n%r',\n array_id, data_array)\n await self.on_receive_array.fire(data_array)", - "docstring": "Parse push data and trigger events." - }, - { - "code": "def list_services(self):\n my_services = []\n for service in self.collection.find().sort('name', pymongo.ASCENDING):\n my_services.append(Service(service))\n return my_services", - "docstring": "Lists all services in mongodb storage." - }, - { - "code": "def iteritems(data, **kwargs):\n return iter(data.items(**kwargs)) if IS_PY3 else data.iteritems(**kwargs)", - "docstring": "Iterate over dict items." - }, - { - "code": "def reset_document(self):\n self.doc_version_set = False\n self.doc_comment_set = False\n self.doc_namespace_set = False\n self.doc_data_lics_set = False\n self.doc_name_set = False\n self.doc_spdx_id_set = False", - "docstring": "Resets the state to allow building new documents" - }, - { - "code": "def download_file(self, filename):\n res = self.__exchange('send(\"{filename}\")'.format(filename=filename))\n if ('unexpected' in res) or ('stdin' in res):\n log.error('Unexpected error downloading file: %s', res)\n raise Exception('Unexpected error downloading file')\n self.__write('C')\n sent_filename = self.__expect(NUL).strip()\n log.info('receiving ' + sent_filename)\n self.__write(ACK, True)\n buf = ''\n data = ''\n chunk, buf = self.__read_chunk(buf)\n while chunk != '':\n self.__write(ACK, True)\n data = data + chunk\n chunk, buf = self.__read_chunk(buf)\n return data", - "docstring": "Download a file from device to local filesystem" - }, - { - "code": "def execute_nonstop_tasks(self, tasks_cls):\n self.execute_batch_tasks(tasks_cls,\n self.conf['sortinghat']['sleep_for'],\n self.conf['general']['min_update_delay'], False)", - "docstring": "Just a wrapper to the execute_batch_tasks method" - }, - { - "code": "def owsproxy_delegate(request):\n twitcher_url = request.registry.settings.get('twitcher.url')\n protected_path = request.registry.settings.get('twitcher.ows_proxy_protected_path', '/ows')\n url = twitcher_url + protected_path + '/proxy'\n if request.matchdict.get('service_name'):\n url += '/' + request.matchdict.get('service_name')\n if request.matchdict.get('access_token'):\n url += '/' + request.matchdict.get('access_token')\n url += '?' + urlparse.urlencode(request.params)\n LOGGER.debug(\"delegate to owsproxy: %s\", url)\n resp = requests.request(method=request.method.upper(), url=url, data=request.body,\n headers=request.headers, verify=False)\n return Response(resp.content, status=resp.status_code, headers=resp.headers)", - "docstring": "Delegates owsproxy request to external twitcher service." 
- }, - { - "code": "def add_tag():\n if len(sys.argv) > 1:\n tag = sys.argv[1]\n doc_mapper = DocMapper()\n if doc_mapper.is_pipe:\n count = 0\n for obj in doc_mapper.get_pipe():\n obj.add_tag(tag)\n obj.update(tags=obj.tags)\n count += 1\n print_success(\"Added tag '{}' to {} object(s)\".format(tag, count))\n else:\n print_error(\"Please use this script with pipes\")\n else:\n print_error(\"Usage: jk-add-tag <tag>\")\n sys.exit()", - "docstring": "Obtains the data from the pipe and appends the given tag." - }, - { - "code": "def _transform_item(self, content_metadata_item):\n content_metadata_type = content_metadata_item['content_type']\n transformed_item = {}\n for integrated_channel_schema_key, edx_data_schema_key in self.DATA_TRANSFORM_MAPPING.items():\n transformer = (\n getattr(\n self,\n 'transform_{content_type}_{edx_data_schema_key}'.format(\n content_type=content_metadata_type,\n edx_data_schema_key=edx_data_schema_key\n ),\n None\n )\n or\n getattr(\n self,\n 'transform_{edx_data_schema_key}'.format(\n edx_data_schema_key=edx_data_schema_key\n ),\n None\n )\n )\n if transformer:\n transformed_item[integrated_channel_schema_key] = transformer(content_metadata_item)\n else:\n try:\n transformed_item[integrated_channel_schema_key] = content_metadata_item[edx_data_schema_key]\n except KeyError:\n LOGGER.exception(\n 'Failed to transform content metadata item field [%s] for [%s]: [%s]',\n edx_data_schema_key,\n self.enterprise_customer.name,\n content_metadata_item,\n )\n return transformed_item", - "docstring": "Transform the provided content metadata item to the schema expected by the integrated channel." - }, - { - "code": "def delete_locks(context, network_ids, addresses):\n addresses_no_longer_null_routed = _find_addresses_to_be_unlocked(\n context, network_ids, addresses)\n LOG.info(\"Deleting %s lock holders on IPAddress with ids: %s\",\n len(addresses_no_longer_null_routed),\n [addr.id for addr in addresses_no_longer_null_routed])\n for address in addresses_no_longer_null_routed:\n lock_holder = None\n try:\n lock_holder = db_api.lock_holder_find(\n context, lock_id=address.lock_id, name=LOCK_NAME,\n scope=db_api.ONE)\n if lock_holder:\n db_api.lock_holder_delete(context, address, lock_holder)\n except Exception:\n LOG.exception(\"Failed to delete lock holder %s\", lock_holder)\n continue\n context.session.flush()", - "docstring": "Deletes locks for each IP address that is no longer null-routed." - }, - { - "code": "def _join_seq(d, k, v):\n if k not in d:\n d[k] = list(v)\n elif isinstance(d[k], list):\n for item in v:\n if item not in d[k]:\n d[k].insert(0, item)\n elif isinstance(d[k], string_types):\n v.append(d[k])\n d[k] = v", - "docstring": "Add a sequence value to env dict" - }, - { - "code": "def consistent_with(event, evidence):\n \"Is event consistent with the given evidence?\"\n return every(lambda (k, v): evidence.get(k, v) == v,\n event.items())", - "docstring": "Is event consistent with the given evidence?" 
- }, - { - "code": "def service_start(service=None, param=None):\n if service is not None:\n to_run = [\"python\", service]\n if param is not None:\n to_run += param\n return subprocess.Popen(to_run)\n return False", - "docstring": "Launch a Process, return his pid" - }, - { - "code": "def color_ramp(self, size):\n color = PALETTE.get(self.option.palette, {})\n color = color.get(self.term.colors, None)\n color_ramp = []\n if color is not None:\n ratio = len(color) / float(size)\n for i in range(int(size)):\n color_ramp.append(self.term.color(color[int(ratio * i)]))\n return color_ramp", - "docstring": "Generate a color ramp for the current screen height." - }, - { - "code": "def run(self, next_task):\n self.event.wait()\n self.task()\n self.event.clear()\n next_task.event.set()", - "docstring": "Wait for the event, run the task, trigger the next task." - }, - { - "code": "def select_arg_verify(endpoint, cert, key, pem, ca, aad, no_verify):\n if not (endpoint.lower().startswith('http')\n or endpoint.lower().startswith('https')):\n raise CLIError('Endpoint must be HTTP or HTTPS')\n usage = ('Valid syntax : --endpoint [ [ --key --cert | --pem | --aad] '\n '[ --ca | --no-verify ] ]')\n if ca and not (pem or all([key, cert])):\n raise CLIError(usage)\n if no_verify and not (pem or all([key, cert]) or aad):\n raise CLIError(usage)\n if no_verify and ca:\n raise CLIError(usage)\n if any([cert, key]) and not all([cert, key]):\n raise CLIError(usage)\n if aad and any([pem, cert, key]):\n raise CLIError(usage)\n if pem and any([cert, key]):\n raise CLIError(usage)", - "docstring": "Verify arguments for select command" - }, - { - "code": "def fetch(self):\n xml = urllib.request.urlopen(self.URL)\n tree = ET.ElementTree(file=xml)\n records = self._parse_deputies(tree.getroot())\n df = pd.DataFrame(records, columns=(\n 'congressperson_id',\n 'budget_id',\n 'condition',\n 'congressperson_document',\n 'civil_name',\n 'congressperson_name',\n 'picture_url',\n 'gender',\n 'state',\n 'party',\n 'phone_number',\n 'email'\n ))\n return self._translate(df)", - "docstring": "Fetches the list of deputies for the current term." - }, - { - "code": "def on_new(self):\n interpreter, pyserver, args = self._get_backend_parameters()\n self.setup_editor(self.tabWidget.create_new_document(\n extension='.py', interpreter=interpreter, server_script=pyserver,\n args=args))\n self.actionRun.setDisabled(True)\n self.actionConfigure_run.setDisabled(True)", - "docstring": "Add a new empty code editor to the tab widget" - }, - { - "code": "def _create_parameter(model, pid, value, sbo=None, constant=True, units=None,\n flux_udef=None):\n parameter = model.createParameter()\n parameter.setId(pid)\n parameter.setValue(value)\n parameter.setConstant(constant)\n if sbo:\n parameter.setSBOTerm(sbo)\n if units:\n parameter.setUnits(flux_udef.getId())", - "docstring": "Create parameter in SBML model." 
- }, - { - "code": "def parse_log_messages(self, text):\n regex = r\"commit ([0-9a-f]+)\\nAuthor: (.*?)\\n\\n(.*?)(?:\\n\\n|$)\"\n messages = re.findall(regex, text, re.DOTALL)\n parsed = []\n for commit, author, message in messages:\n parsed.append((\n commit[:10],\n re.sub(r\"\\s*<.*?>\", \"\", author),\n message.strip()\n ))\n return parsed", - "docstring": "Will parse git log messages in the 'short' format" - }, - { - "code": "def expand(self, problem):\n \"List the nodes reachable in one step from this node.\"\n return [self.child_node(problem, action)\n for action in problem.actions(self.state)]", - "docstring": "List the nodes reachable in one step from this node." - }, - { - "code": "def convert_pb_kvs(kvs, include_non_primitives=True):\n config = {}\n for kv in kvs:\n if kv.value:\n config[kv.key] = kv.value\n elif kv.serialized_value:\n if topology_pb2.JAVA_SERIALIZED_VALUE == kv.type:\n jv = _convert_java_value(kv, include_non_primitives=include_non_primitives)\n if jv is not None:\n config[kv.key] = jv\n else:\n config[kv.key] = _raw_value(kv)\n return config", - "docstring": "converts pb kvs to dict" - }, - { - "code": "def initialize():\n new_variables = set(tf.global_variables()) - ALREADY_INITIALIZED\n get_session().run(tf.variables_initializer(new_variables))\n ALREADY_INITIALIZED.update(new_variables)", - "docstring": "Initialize all the uninitialized variables in the global scope." - }, - { - "code": "def can_infect(self, event):\n if event.from_stop_I != self.stop_I:\n return False\n if not self.has_been_visited():\n return False\n else:\n time_sep = event.dep_time_ut-self.get_min_visit_time()\n if (time_sep >= self.min_transfer_time) or (event.trip_I == -1 and time_sep >= 0):\n return True\n else:\n for visit in self.visit_events:\n if (event.trip_I == visit.trip_I) and (time_sep >= 0):\n return True\n return False", - "docstring": "Whether the spreading stop can infect using this event." - }, - { - "code": "def node_exclusion_filter_builder(nodes: Iterable[BaseEntity]) -> NodePredicate:\n node_set = set(nodes)\n def exclusion_filter(_: BELGraph, node: BaseEntity) -> bool:\n return node not in node_set\n return exclusion_filter", - "docstring": "Build a filter that fails on nodes in the given list." - }, - { - "code": "def _genLoggingFilePath():\n appName = os.path.splitext(os.path.basename(sys.argv[0]))[0] or 'UnknownApp'\n appLogDir = os.path.abspath(os.path.join(\n os.environ['NTA_LOG_DIR'],\n 'numenta-logs-%s' % (os.environ['USER'],),\n appName))\n appLogFileName = '%s-%s-%s.log' % (\n appName, long(time.mktime(time.gmtime())), os.getpid())\n return os.path.join(appLogDir, appLogFileName)", - "docstring": "Generate a filepath for the calling app" - }, - { - "code": "def save_service(self, service, overwrite=True):\n name = namesgenerator.get_sane_name(service.name)\n if not name:\n name = namesgenerator.get_random_name()\n if self.collection.count_documents({'name': name}) > 0:\n name = namesgenerator.get_random_name(retry=True)\n if self.collection.count_documents({'name': name}) > 0:\n if overwrite:\n self.collection.delete_one({'name': name})\n else:\n raise Exception(\"service name already registered.\")\n self.collection.insert_one(Service(\n name=name,\n url=baseurl(service.url),\n type=service.type,\n purl=service.purl,\n public=service.public,\n auth=service.auth,\n verify=service.verify))\n return self.fetch_by_name(name=name)", - "docstring": "Stores an OWS service in mongodb." 
- }, - { - "code": "def format_arxiv_id(arxiv_id):\n if arxiv_id and \"/\" not in arxiv_id and \"arXiv\" not in arxiv_id:\n return \"arXiv:%s\" % (arxiv_id,)\n elif arxiv_id and '.' not in arxiv_id and arxiv_id.lower().startswith('arxiv:'):\n return arxiv_id[6:]\n else:\n return arxiv_id", - "docstring": "Properly format arXiv IDs." - }, - { - "code": "def _initiate_starttls(self, **kwargs):\n if self._tls_state == \"connected\":\n raise RuntimeError(\"Already TLS-connected\")\n kwargs[\"do_handshake_on_connect\"] = False\n logger.debug(\"Wrapping the socket into ssl\")\n self._socket = ssl.wrap_socket(self._socket, **kwargs)\n self._set_state(\"tls-handshake\")\n self._continue_tls_handshake()", - "docstring": "Initiate starttls handshake over the socket." - }, - { - "code": "def _arg_parser():\n description = \"Converts a completezip to a litezip\"\n parser = argparse.ArgumentParser(description=description)\n verbose_group = parser.add_mutually_exclusive_group()\n verbose_group.add_argument(\n '-v', '--verbose', action='store_true',\n dest='verbose', default=None,\n help=\"increase verbosity\")\n verbose_group.add_argument(\n '-q', '--quiet', action='store_false',\n dest='verbose', default=None,\n help=\"print nothing to stdout or stderr\")\n parser.add_argument(\n 'location',\n help=\"Location of the unpacked litezip\")\n return parser", - "docstring": "Factory for creating the argument parser" - }, - { - "code": "def move_to(self, thing, destination):\n \"Move a thing to a new location.\"\n thing.bump = self.some_things_at(destination, Obstacle)\n if not thing.bump:\n thing.location = destination\n for o in self.observers:\n o.thing_moved(thing)", - "docstring": "Move a thing to a new location." - }, - { - "code": "def create_new_version(\n self,\n name,\n subject,\n text='',\n template_id=None,\n html=None,\n locale=None,\n timeout=None\n ):\n if(html):\n payload = {\n 'name': name,\n 'subject': subject,\n 'html': html,\n 'text': text\n }\n else:\n payload = {\n 'name': name,\n 'subject': subject,\n 'text': text\n }\n if locale:\n url = self.TEMPLATES_SPECIFIC_LOCALE_VERSIONS_ENDPOINT % (\n template_id,\n locale\n )\n else:\n url = self.TEMPLATES_NEW_VERSION_ENDPOINT % template_id\n return self._api_request(\n url,\n self.HTTP_POST,\n payload=payload,\n timeout=timeout\n )", - "docstring": "API call to create a new version of a template" - }, - { - "code": "def _nginx_http_spec(port_spec, bridge_ip):\n server_string_spec = \"\\t server {\\n\"\n server_string_spec += \"\\t \\t {}\\n\".format(_nginx_max_file_size_string())\n server_string_spec += \"\\t \\t {}\\n\".format(_nginx_listen_string(port_spec))\n server_string_spec += \"\\t \\t {}\\n\".format(_nginx_server_name_string(port_spec))\n server_string_spec += _nginx_location_spec(port_spec, bridge_ip)\n server_string_spec += _custom_502_page()\n server_string_spec += \"\\t }\\n\"\n return server_string_spec", - "docstring": "This will output the nginx HTTP config string for specific port spec" - }, - { - "code": "def handle_transmission_error(self, learner_data, request_exception):\n try:\n sys_msg = request_exception.response.content\n except AttributeError:\n pass\n else:\n if 'user account is inactive' in sys_msg:\n ecu = EnterpriseCustomerUser.objects.get(\n enterprise_enrollments__id=learner_data.enterprise_course_enrollment_id)\n ecu.active = False\n ecu.save()\n LOGGER.warning(\n 'User %s with ID %s and email %s is a former employee of %s '\n 'and has been marked inactive in SAPSF. 
Now marking inactive internally.',\n ecu.username, ecu.user_id, ecu.user_email, ecu.enterprise_customer\n )\n return\n super(SapSuccessFactorsLearnerTransmitter, self).handle_transmission_error(learner_data, request_exception)", - "docstring": "Handle the case where the employee on SAPSF's side is marked as inactive." - }, - { - "code": "def _call_raxml(command_list):\n proc = subprocess.Popen(\n command_list,\n stderr=subprocess.STDOUT, \n stdout=subprocess.PIPE\n )\n comm = proc.communicate()\n return comm", - "docstring": "call the command as sps" - }, - { - "code": "def _pop(self, model):\n tags = []\n for tag in model.tags:\n if self.is_tag(tag):\n tags.append(tag)\n if tags:\n for tag in tags:\n model.tags.remove(tag)\n return tags", - "docstring": "Pop all matching tags off the model and return them." - }, - { - "code": "def from_ssl_socket(cls, ssl_socket):\n cert = cls()\n try:\n data = ssl_socket.getpeercert()\n except AttributeError:\n return cert\n logger.debug(\"Certificate data from ssl module: {0!r}\".format(data))\n if not data:\n return cert\n cert.validated = True\n cert.subject_name = data.get('subject')\n cert.alt_names = defaultdict(list)\n if 'subjectAltName' in data:\n for name, value in data['subjectAltName']:\n cert.alt_names[name].append(value)\n if 'notAfter' in data:\n tstamp = ssl.cert_time_to_seconds(data['notAfter'])\n cert.not_after = datetime.utcfromtimestamp(tstamp)\n if sys.version_info.major < 3:\n cert._decode_names()\n cert.common_names = []\n if cert.subject_name:\n for part in cert.subject_name:\n for name, value in part:\n if name == 'commonName':\n cert.common_names.append(value)\n return cert", - "docstring": "Load certificate data from an SSL socket." - }, - { - "code": "def translate_fourier(image, dx):\n N = image.shape[0]\n f = 2*np.pi*np.fft.fftfreq(N)\n kx,ky,kz = np.meshgrid(*(f,)*3, indexing='ij')\n kv = np.array([kx,ky,kz]).T\n q = np.fft.fftn(image)*np.exp(-1.j*(kv*dx).sum(axis=-1)).T\n return np.real(np.fft.ifftn(q))", - "docstring": "Translate an image in fourier-space with plane waves" - }, - { - "code": "def auto_widget(field):\n info = {\n 'widget': field.field.widget.__class__.__name__,\n 'field': field.field.__class__.__name__,\n 'name': field.name,\n }\n return [\n fmt.format(**info)\n for fmt in (\n '{field}_{widget}_{name}',\n '{field}_{name}',\n '{widget}_{name}',\n '{field}_{widget}',\n '{name}',\n '{widget}',\n '{field}',\n )\n ]", - "docstring": "Return a list of widget names for the provided field." - }, - { - "code": "def _interrupt_read(self):\n data = self._device.read(ENDPOINT, REQ_INT_LEN, timeout=TIMEOUT)\n LOGGER.debug('Read data: %r', data)\n return data", - "docstring": "Read data from device." - }, - { - "code": "async def remove_user(self, remove_user_request):\n response = hangouts_pb2.RemoveUserResponse()\n await self._pb_request('conversations/removeuser',\n remove_user_request, response)\n return response", - "docstring": "Remove a participant from a group conversation." - }, - { - "code": "def gain(abf):\n Ys=np.nan_to_num(swhlab.ap.getAvgBySweep(abf,'freq'))\n Xs=abf.clampValues(abf.dataX[int(abf.protoSeqX[1]+.01)])\n swhlab.plot.new(abf,title=\"gain function\",xlabel=\"command current (pA)\",\n ylabel=\"average inst. freq. (Hz)\")\n pylab.plot(Xs,Ys,'.-',ms=20,alpha=.5,color='b')\n pylab.axhline(0,alpha=.5,lw=2,color='r',ls=\"--\")\n pylab.margins(.1,.1)", - "docstring": "easy way to plot a gain function." 
- }, - { - "code": "def _extract_links(self):\n extracted = dict()\n try:\n for key, value in self.request.links.items():\n parsed = urlparse(value[\"url\"])\n fragment = \"{path}?{query}\".format(path=parsed[2], query=parsed[4])\n extracted[key] = fragment\n parsed = list(urlparse(self.self_link))\n stripped = \"&\".join(\n [\n \"%s=%s\" % (p[0], p[1])\n for p in parse_qsl(parsed[4])\n if p[0] != \"format\"\n ]\n )\n extracted[\"self\"] = urlunparse(\n [parsed[0], parsed[1], parsed[2], parsed[3], stripped, parsed[5]]\n )\n return extracted\n except KeyError:\n return None", - "docstring": "Extract self, first, next, last links from a request response" - }, - { - "code": "def namelist(self):\n names = []\n for member in self.filelist:\n names.append(member.filename)\n return names", - "docstring": "Return a list of file names in the archive." - }, - { - "code": "def _rebuildPartitionIdMap(self, partitionIdList):\n self._partitionIdMap = {}\n for row, partitionId in enumerate(partitionIdList):\n indices = self._partitionIdMap.get(partitionId, [])\n indices.append(row)\n self._partitionIdMap[partitionId] = indices", - "docstring": "Rebuilds the partition Id map using the given partitionIdList" - }, - { - "code": "def parse_override_config(namespace):\n overrides = dict()\n for config in namespace:\n kv = config.split(\"=\")\n if len(kv) != 2:\n raise Exception(\"Invalid config property format (%s) expected key=value\" % config)\n if kv[1] in ['true', 'True', 'TRUE']:\n overrides[kv[0]] = True\n elif kv[1] in ['false', 'False', 'FALSE']:\n overrides[kv[0]] = False\n else:\n overrides[kv[0]] = kv[1]\n return overrides", - "docstring": "Parse the command line for overriding the defaults" - }, - { - "code": "def process_token(self, kind, string, start, end, line):\n if self.current_block.is_comment:\n if kind == tokenize.COMMENT:\n self.current_block.add(string, start, end, line)\n else:\n self.new_noncomment(start[0], end[0])\n else:\n if kind == tokenize.COMMENT:\n self.new_comment(string, start, end, line)\n else:\n self.current_block.add(string, start, end, line)", - "docstring": "Process a single token." - }, - { - "code": "def less_or_equal(a, b, *args):\n return (\n less(a, b) or soft_equals(a, b)\n ) and (not args or less_or_equal(b, *args))", - "docstring": "Implements the '<=' operator with JS-style type coercion." - }, - { - "code": "def config(list):\n if list:\n _config = GlobalConfigManager.get_config_or_default()\n Printer.print_header('Current config:')\n dict_tabulate(_config.to_dict())", - "docstring": "Set and get the global configurations." - }, - { - "code": "def ctime(self):\n try:\n return self._stat.st_ctime\n except:\n self._stat = self.stat()\n return self.ctime", - "docstring": "Get most recent create time in timestamp." - }, - { - "code": "def child(self, offset256):\n a = bytes(self.pubkey) + offset256\n s = hashlib.sha256(a).digest()\n return self.derive_from_seed(s)", - "docstring": "Derive new private key from this key and a sha256 \"offset\"" - }, - { - "code": "def construct_formset(self):\n formset_class = self.get_formset()\n if hasattr(self, 'get_extra_form_kwargs'):\n klass = type(self).__name__\n raise DeprecationWarning(\n 'Calling {0}.get_extra_form_kwargs is no longer supported. 
'\n            'Set `form_kwargs` in {0}.formset_kwargs or override '\n            '{0}.get_formset_kwargs() directly.'.format(klass),\n        )\n    return formset_class(**self.get_formset_kwargs())", - "docstring": "Returns an instance of the formset" - }, - { - "code": "def prepare_to_run(self):\n self.clock.reset()\n for e in self.entities:\n e.prepare_to_run(self.clock, self.period_count)", - "docstring": "Prepare the model for execution." - }, - { - "code": "def paragraph(separator='\\n\\n', wrap_start='', wrap_end='',\n html=False, sentences_quantity=3):\n return paragraphs(quantity=1, separator=separator, wrap_start=wrap_start,\n wrap_end=wrap_end, html=html,\n sentences_quantity=sentences_quantity)", - "docstring": "Return a random paragraph." - }, - { - "code": "def all_other_enabled_satchels(self):\n return dict(\n (name, satchel)\n for name, satchel in self.all_satchels.items()\n if name != self.name.upper() and name.lower() in map(str.lower, self.genv.services)\n )", - "docstring": "Returns a dictionary of satchels used in the current configuration, excluding ourselves." - }, - { - "code": "def _oct_to_dec(ip, check=True):\n if check and not is_oct(ip):\n raise ValueError('_oct_to_dec: invalid IP: \"%s\"' % ip)\n if isinstance(ip, int):\n ip = oct(ip)\n return int(str(ip), 8)", - "docstring": "Octal to decimal conversion." - }, - { - "code": "def decompose(self):\n contents = [i for i in self.contents]\n for i in contents:\n if isinstance(i, Tag):\n i.decompose()\n else:\n i.extract()\n self.extract()", - "docstring": "Recursively destroys the contents of this tree." - }, - { - "code": "def _spawn_heartbeat(self):\n self.spawn(self._heartbeat)\n self.spawn(self._heartbeat_timeout)", - "docstring": "Spawns the heartbeat and heartbeat timeout jobs." - }, - { - "code": "def _uptime_plan9():\n try:\n f = open('/dev/time', 'r')\n s, ns, ct, cf = f.read().split()\n f.close()\n return float(ct) / float(cf)\n except (IOError, ValueError):\n return None", - "docstring": "Returns uptime in seconds or None, on Plan 9." - }, - { - "code": "def getAccountFromPrivateKey(self, wif):\n pub = self.publickey_from_wif(wif)\n return self.getAccountFromPublicKey(pub)", - "docstring": "Obtain account name from private key" - }, - { - "code": "def _get_parser(extra_args):\n parser = argparse.ArgumentParser(\n formatter_class=argparse.ArgumentDefaultsHelpFormatter,\n )\n dirs = appdirs.AppDirs('hangups', 'hangups')\n default_token_path = os.path.join(dirs.user_cache_dir, 'refresh_token.txt')\n parser.add_argument(\n '--token-path', default=default_token_path,\n help='path used to store OAuth refresh token'\n )\n parser.add_argument(\n '-d', '--debug', action='store_true',\n help='log detailed debugging messages'\n )\n for extra_arg in extra_args:\n parser.add_argument(extra_arg, required=True)\n return parser", - "docstring": "Return ArgumentParser with any extra arguments." - }, - { - "code": "def f7(seq):\n seen = set()\n seen_add = seen.add\n return [x for x in seq if x not in seen and not seen_add(x)]", - "docstring": "Makes a list unique" - }, - { - "code": "def python_value(self, value):\n if self.field_type == 'TEXT' and isinstance(value, str):\n return self.loads(value)\n return value", - "docstring": "Parse value from database." 
- }, - { - "code": "def export(self):\n content_metadata_export = {}\n content_metadata_items = self.enterprise_api.get_content_metadata(self.enterprise_customer)\n LOGGER.info('Retrieved content metadata for enterprise [%s]', self.enterprise_customer.name)\n for item in content_metadata_items:\n transformed = self._transform_item(item)\n LOGGER.info(\n 'Exporting content metadata item with plugin configuration [%s]: [%s]',\n self.enterprise_configuration,\n json.dumps(transformed, indent=4),\n )\n content_metadata_item_export = ContentMetadataItemExport(item, transformed)\n content_metadata_export[content_metadata_item_export.content_id] = content_metadata_item_export\n return OrderedDict(sorted(content_metadata_export.items()))", - "docstring": "Return the exported and transformed content metadata as a dictionary." - }, - { - "code": "def prune(self, var, value, removals):\n \"Rule out var=value.\"\n self.curr_domains[var].remove(value)\n if removals is not None: removals.append((var, value))", - "docstring": "Rule out var=value." - }, - { - "code": "def _find_last_of(self, path, finders):\r\n found_path = None\r\n for finder in finders:\r\n result = finder.find(path)\r\n if result:\r\n found_path = result\r\n return found_path", - "docstring": "Find the last occurance of the file in finders" - }, - { - "code": "def union_fill_gap(self, i):\n return Interval(min(self.start, i.start), max(self.end, i.end))", - "docstring": "Like union, but ignores whether the two intervals intersect or not" - }, - { - "code": "def update(self):\n packager = self.packager\n if packager == APT:\n self.sudo('DEBIAN_FRONTEND=noninteractive apt-get -yq update')\n elif packager == YUM:\n self.sudo('yum update')\n else:\n raise Exception('Unknown packager: %s' % (packager,))", - "docstring": "Preparse the packaging system for installations." - }, - { - "code": "def body(self):\n if not hasattr(self, '_body'):\n self._body = inspect.getsource(self.module)\n return self._body", - "docstring": "get the contents of the script" - }, - { - "code": "def __intermediate_address(self, address):\n for key in self._address_keys:\n if key in address:\n del address[key]\n return address", - "docstring": "deletes NetJSON address keys" - }, - { - "code": "def delete(self):\n with db.session.begin_nested():\n Membership.query_by_group(self).delete()\n GroupAdmin.query_by_group(self).delete()\n GroupAdmin.query_by_admin(self).delete()\n db.session.delete(self)", - "docstring": "Delete a group and all associated memberships." - }, - { - "code": "def togroups(self, user, groups):\n r = self.local_renderer\n if isinstance(groups, six.string_types):\n groups = [_.strip() for _ in groups.split(',') if _.strip()]\n for group in groups:\n r.env.username = user\n r.env.group = group\n r.sudo('groupadd --force {group}')\n r.sudo('adduser {username} {group}')", - "docstring": "Adds the user to the given list of groups." 
- }, - { - "code": "def create(self, server):\n for chunk in self.__cut_to_size():\n server.post(\n 'tasks_admin',\n chunk.as_payload(),\n replacements={\n 'slug': chunk.challenge.slug})", - "docstring": "Create the tasks on the server" - }, - { - "code": "def close(self):\n try:\n if self.baud != self.start_baud:\n self.__set_baudrate(self.start_baud)\n self._port.flush()\n self.__clear_buffers()\n except serial.serialutil.SerialException:\n pass\n log.debug('closing port')\n self._port.close()", - "docstring": "restores the nodemcu to default baudrate and then closes the port" - }, - { - "code": "def parse_file_provider(uri):\n providers = {'gs': job_model.P_GCS, 'file': job_model.P_LOCAL}\n provider_found = re.match(r'^([A-Za-z][A-Za-z0-9+.-]{0,29})://', uri)\n if provider_found:\n prefix = provider_found.group(1).lower()\n else:\n prefix = 'file'\n if prefix in providers:\n return providers[prefix]\n else:\n raise ValueError('File prefix not supported: %s://' % prefix)", - "docstring": "Find the file provider for a URI." - }, - { - "code": "def _document_frequency(X):\n if sp.isspmatrix_csr(X):\n return np.bincount(X.indices, minlength=X.shape[1])\n return np.diff(sp.csc_matrix(X, copy=False).indptr)", - "docstring": "Count the number of non-zero values for each feature in sparse X." - }, - { - "code": "def apply_mesh_programs(self, mesh_programs=None):\n if not mesh_programs:\n mesh_programs = [ColorProgram(), TextureProgram(), FallbackProgram()]\n for mesh in self.meshes:\n for mp in mesh_programs:\n instance = mp.apply(mesh)\n if instance is not None:\n if isinstance(instance, MeshProgram):\n mesh.mesh_program = mp\n break\n else:\n raise ValueError(\"apply() must return a MeshProgram instance, not {}\".format(type(instance)))\n if not mesh.mesh_program:\n print(\"WARNING: No mesh program applied to '{}'\".format(mesh.name))", - "docstring": "Applies mesh programs to meshes" - }, - { - "code": "def _step1func(self, force, ipyclient):\n sfiles = self.paramsdict[\"sorted_fastq_path\"]\n rfiles = self.paramsdict[\"raw_fastq_path\"]\n if sfiles and rfiles:\n raise IPyradWarningExit(NOT_TWO_PATHS)\n if not (sfiles or rfiles):\n raise IPyradWarningExit(NO_SEQ_PATH_FOUND)\n if self._headers:\n if sfiles:\n print(\"\\n{}Step 1: Loading sorted fastq data to Samples\"\\\n .format(self._spacer))\n else:\n print(\"\\n{}Step 1: Demultiplexing fastq data to Samples\"\\\n .format(self._spacer))\n if self.samples:\n if not force:\n print(SAMPLES_EXIST.format(len(self.samples), self.name))\n else:\n if glob.glob(sfiles):\n self._link_fastqs(ipyclient=ipyclient, force=force)\n else:\n assemble.demultiplex.run2(self, ipyclient, force)\n else:\n if glob.glob(sfiles):\n self._link_fastqs(ipyclient=ipyclient)\n else:\n assemble.demultiplex.run2(self, ipyclient, force)", - "docstring": "hidden wrapped function to start step 1" - }, - { - "code": "def fields_types(self, tname, qstring, itemtype):\n template_name = tname + itemtype\n query_string = qstring.format(i=itemtype)\n if self.templates.get(template_name) and not self._updated(\n query_string, self.templates[template_name], template_name\n ):\n return self.templates[template_name][\"tmplt\"]\n retrieved = self._retrieve_data(query_string)\n return self._cache(retrieved, template_name)", - "docstring": "Retrieve item fields or creator types" - }, - { - "code": "def _saliency_map(self, a, image, target, labels, mask, fast=False):\n alphas = a.gradient(image, target) * mask\n if fast:\n betas = -np.ones_like(alphas)\n else:\n betas = np.sum([\n 
a.gradient(image, label) * mask - alphas\n for label in labels], 0)\n salmap = np.abs(alphas) * np.abs(betas) * np.sign(alphas * betas)\n idx = np.argmin(salmap)\n idx = np.unravel_index(idx, mask.shape)\n pix_sign = np.sign(alphas)[idx]\n return idx, pix_sign", - "docstring": "Implements Algorithm 3 in manuscript" - }, - { - "code": "def application_exists(self):\n response = self.ebs.describe_applications(application_names=[self.app_name])\n return len(response['DescribeApplicationsResponse']['DescribeApplicationsResult']['Applications']) > 0", - "docstring": "Returns whether or not the given app_name exists" - }, - { - "code": "def update(self, params, values):\n global_update, particles = self._update_type(params)\n if global_update:\n self.set_values(params, values)\n self.initialize()\n return\n oldargs = self._drawargs()\n for n in particles:\n self._draw_particle(self.pos[n], *listify(oldargs[n]), sign=-1)\n self.set_values(params, values)\n newargs = self._drawargs()\n for n in particles:\n self._draw_particle(self.pos[n], *listify(newargs[n]), sign=+1)", - "docstring": "Update the particles field given new parameter values" - }, - { - "code": "def main():\n config = Config()\n pipes_dir = config.get('pipes', 'directory')\n pipes_config = config.get('pipes', 'config_file')\n pipes_config_path = os.path.join(config.config_dir, pipes_config)\n if not os.path.exists(pipes_config_path):\n print_error(\"Please configure the named pipes first\")\n return\n workers = create_pipe_workers(pipes_config_path, pipes_dir)\n if workers:\n for worker in workers:\n worker.start()\n try:\n for worker in workers:\n worker.join()\n except KeyboardInterrupt:\n print_notification(\"Shutting down\")\n for worker in workers:\n worker.terminate()\n worker.join()", - "docstring": "Loads the config and handles the workers." - }, - { - "code": "def deprecated(message: str):\n assert isinstance(message, str), \"The deprecated decorator requires a message string argument.\"\n def decorator(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n warnings.warn(f\"`{func.__qualname__}` is deprecated. {message}\",\n category=ManticoreDeprecationWarning, stacklevel=2)\n return func(*args, **kwargs)\n return wrapper\n return decorator", - "docstring": "A decorator for marking functions as deprecated." - }, - { - "code": "def _transform_triple_numpy(x):\n return np.array([x.head, x.relation, x.tail], dtype=np.int64)", - "docstring": "Transform triple index into a 1-D numpy array." - }, - { - "code": "def balance(address):\n txhistory = Address.transactions(address)\n balance = 0\n for i in txhistory:\n if i.recipientId == address:\n balance += i.amount\n if i.senderId == address:\n balance -= (i.amount + i.fee)\n delegates = Delegate.delegates()\n for i in delegates:\n if address == i.address:\n forged_blocks = Delegate.blocks(i.pubkey)\n for block in forged_blocks:\n balance += (block.reward + block.totalFee)\n if balance < 0:\n height = Node.height()\n logger.fatal('Negative balance for address {0}, Nodeheight: {1}'.format(address, height))\n raise NegativeBalanceError('Negative balance for address {0}, Nodeheight: {1}'.format(address, height))\n return balance", - "docstring": "Takes a single address and returns the current balance." 
- }, - { - "code": "def _httplib2_init(username, password):\n obj = httplib2.Http()\n if username and password:\n obj.add_credentials(username, password)\n return obj", - "docstring": "Used to instantiate a regular HTTP request object" - }, - { - "code": "def _normalised_numpy(self):\n dx = (self.screen.width / float(len(self.points)))\n oy = (self.screen.height)\n points = np.array(self.points) - self.minimum\n points = points * 4.0 / self.extents * self.size.y\n for x, y in enumerate(points):\n yield Point((\n dx * x,\n min(oy, oy - y),\n ))", - "docstring": "Normalised data points using numpy." - }, - { - "code": "def filter(self, filter_function):\n from heronpy.streamlet.impl.filterbolt import FilterStreamlet\n filter_streamlet = FilterStreamlet(filter_function, self)\n self._add_child(filter_streamlet)\n return filter_streamlet", - "docstring": "Return a new Streamlet containing only the elements that satisfy filter_function" - }, - { - "code": "def create(self):\n query = (\n ).format(self.__tablename__, self.__key__, self.__value__)\n connection = sqlite3.connect(self.sqlite_file)\n cursor = connection.cursor()\n cursor.execute(query)\n connection.commit()", - "docstring": "Create the new table in the SQLite database" - }, - { - "code": "def _follow_next(self, url):\n response = self._json(self._get(url), 200)\n data = response['data']\n next_url = self._get_attribute(response, 'links', 'next')\n while next_url is not None:\n response = self._json(self._get(next_url), 200)\n data.extend(response['data'])\n next_url = self._get_attribute(response, 'links', 'next')\n return data", - "docstring": "Follow the 'next' link on paginated results." - }, - { - "code": "def run(self, steps=1000):\n \"Run the Environment for given number of time steps.\"\n for step in range(steps):\n if self.is_done(): return\n self.step()", - "docstring": "Run the Environment for given number of time steps." - }, - { - "code": "def paginate_dataframe(self, dataframe):\n if self.paginator is None:\n return None\n return self.paginator.paginate_dataframe(dataframe, self.request, view=self)", - "docstring": "Return a single page of results, or `None` if pagination is disabled." - }, - { - "code": "def removeLogbooks(self, type=None, logs=[]):\n if type is not None and type in self.logList:\n if len(logs) == 0 or logs == \"All\":\n del self.logList[type]\n else:\n for logbook in logs:\n if logbook in self.logList[type]:\n self.logList[type].remove(logbook)\n self.changeLogType()", - "docstring": "Remove unwanted logbooks from list." - }, - { - "code": "def terminate(self):\n if self._pool is not None:\n self._pool.terminate()\n self._pool.join()\n self._pool = None", - "docstring": "Terminate the pool immediately." 
- }, - { - "code": "def perform_search(\n search_term,\n user=None,\n size=10,\n from_=0,\n course_id=None):\n (field_dictionary, filter_dictionary, exclude_dictionary) = SearchFilterGenerator.generate_field_filters(\n user=user,\n course_id=course_id\n )\n searcher = SearchEngine.get_search_engine(getattr(settings, \"COURSEWARE_INDEX_NAME\", \"courseware_index\"))\n if not searcher:\n raise NoSearchEngineError(\"No search engine specified in settings.SEARCH_ENGINE\")\n results = searcher.search_string(\n search_term,\n field_dictionary=field_dictionary,\n filter_dictionary=filter_dictionary,\n exclude_dictionary=exclude_dictionary,\n size=size,\n from_=from_,\n doc_type=\"courseware_content\",\n )\n for result in results[\"results\"]:\n result[\"data\"] = SearchResultProcessor.process_result(result[\"data\"], search_term, user)\n results[\"access_denied_count\"] = len([r for r in results[\"results\"] if r[\"data\"] is None])\n results[\"results\"] = [r for r in results[\"results\"] if r[\"data\"] is not None]\n return results", - "docstring": "Call the search engine with the appropriate parameters" - }, - { - "code": "def extend(self, iterable):\n if not hasattr(self, \"_dict\") or self._dict is None:\n self._dict = {}\n _dict = self._dict\n current_length = len(self)\n list.extend(self, iterable)\n for i, obj in enumerate(islice(self, current_length, None),\n current_length):\n the_id = obj.id\n if the_id not in _dict:\n _dict[the_id] = i\n else:\n self = self[:current_length]\n self._check(the_id)\n raise ValueError(\"id '%s' at index %d is non-unique. \"\n \"Is it present twice?\" % (str(the_id), i))", - "docstring": "extend list by appending elements from the iterable" - }, - { - "code": "def cleanwrap(func):\n def enc(self, *args, **kwargs):\n return (func(self, item, **kwargs) for item in args)\n return enc", - "docstring": "Wrapper for Zotero._cleanup" - }, - { - "code": "def file_compile(self, path):\n log.info('Compile '+path)\n cmd = 'node.compile(\"%s\")' % path\n res = self.__exchange(cmd)\n log.info(res)\n return res", - "docstring": "Compiles a file specified by path on the device" - }, - { - "code": "def tz(self):\n if not self._tz:\n self._tz = tzlocal.get_localzone().zone\n return self._tz", - "docstring": "Return the timezone. If none is set use system timezone" - }, - { - "code": "def summarize_edge_filter(graph: BELGraph, edge_predicates: EdgePredicates) -> None:\n passed = count_passed_edge_filter(graph, edge_predicates)\n print('{}/{} edges passed {}'.format(\n passed, graph.number_of_edges(),\n (\n ', '.join(edge_filter.__name__ for edge_filter in edge_predicates)\n if isinstance(edge_predicates, Iterable) else\n edge_predicates.__name__\n )\n ))", - "docstring": "Print a summary of the number of edges passing a given set of filters." - }, - { - "code": "def _make_environment(self, inputs, outputs, mounts):\n env = {}\n env.update(providers_util.get_file_environment_variables(inputs))\n env.update(providers_util.get_file_environment_variables(outputs))\n env.update(providers_util.get_file_environment_variables(mounts))\n return env", - "docstring": "Return a dictionary of environment variables for the container." - }, - { - "code": "def to_jupyter(graph: BELGraph, chart: Optional[str] = None) -> Javascript:\n with open(os.path.join(HERE, 'render_with_javascript.js'), 'rt') as f:\n js_template = Template(f.read())\n return Javascript(js_template.render(**_get_context(graph, chart=chart)))", - "docstring": "Render the graph as JavaScript in a Jupyter Notebook." 
- }, - { - "code": "def _generateRangeDescription(self, ranges):\n desc = \"\"\n numRanges = len(ranges)\n for i in xrange(numRanges):\n if ranges[i][0] != ranges[i][1]:\n desc += \"%.2f-%.2f\" % (ranges[i][0], ranges[i][1])\n else:\n desc += \"%.2f\" % (ranges[i][0])\n if i < numRanges - 1:\n desc += \", \"\n return desc", - "docstring": "generate description from a text description of the ranges" - }, - { - "code": "def word_to_id(self, word):\n if word in self._vocab:\n return self._vocab[word]\n else:\n return self._unk_id", - "docstring": "Returns the integer id of a word string." - }, - { - "code": "async def easter_egg(self, easter_egg_request):\n response = hangouts_pb2.EasterEggResponse()\n await self._pb_request('conversations/easteregg',\n easter_egg_request, response)\n return response", - "docstring": "Send an easter egg event to a conversation." - }, - { - "code": "def load():\n for operator in operators:\n module, symbols = operator[0], operator[1:]\n path = 'grappa.operators.{}'.format(module)\n operator = __import__(path, None, None, symbols)\n for symbol in symbols:\n Engine.register(getattr(operator, symbol))", - "docstring": "Loads the built-in operators into the global test engine." - }, - { - "code": "def __register_library(self, module_name: str, attr: str, fallback: str = None):\n try:\n module = importlib.import_module(module_name)\n except ImportError:\n if fallback is not None:\n module = importlib.import_module(fallback)\n self.__logger.warn(module_name + \" not available: Replaced with \" + fallback)\n else:\n self.__logger.warn(module_name + \" not available: No Replacement Specified\")\n if not attr in dir(self.__sketch):\n setattr(self.__sketch, attr, module)\n else:\n self.__logger.warn(attr +\" could not be imported as it's label is already used in the sketch\")", - "docstring": "Inserts Interpreter Library of imports into sketch in a very non-consensual way" - }, - { - "code": "def _create_archive(self):\n self.status = u'270 creating final encrypted backup of cleansed attachments'\n return self._create_encrypted_zip(source='clean', fs_target_dir=self.container.fs_archive_cleansed)", - "docstring": "creates an encrypted archive of the dropbox outside of the drop directory." - }, - { - "code": "def put(self, key):\n self.client.write(self._key_path(key['name']), **key)\n return self._key_path(key['name'])", - "docstring": "Put and return the only unique identifier possible, its path" - }, - { - "code": "def read_json(path):\n if path.startswith((\"http://\", \"https://\")):\n try:\n return json.loads(urlopen(path).read().decode())\n except HTTPError:\n raise FileNotFoundError(\"%s not found\", path)\n elif path.startswith(\"s3://\"):\n bucket = get_boto3_bucket(path.split(\"/\")[2])\n key = \"/\".join(path.split(\"/\")[3:])\n for obj in bucket.objects.filter(Prefix=key):\n if obj.key == key:\n return json.loads(obj.get()['Body'].read().decode())\n raise FileNotFoundError(\"%s not found\", path)\n else:\n try:\n with open(path, \"r\") as src:\n return json.loads(src.read())\n except:\n raise FileNotFoundError(\"%s not found\", path)", - "docstring": "Read local or remote." 
- }, - { - "code": "def put_async(self, url, name, data, callback=None, params=None, headers=None):\n if name is None: name = ''\n params = params or {}\n headers = headers or {}\n endpoint = self._build_endpoint_url(url, name)\n self._authenticate(params, headers)\n data = json.dumps(data, cls=JSONEncoder)\n process_pool.apply_async(make_put_request,\n args=(endpoint, data, params, headers),\n callback=callback)", - "docstring": "Asynchronous PUT request with the process pool." - }, - { - "code": "def send_digits(self, call_params):\n path = '/' + self.api_version + '/SendDigits/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Send digits to a Call" - }, - { - "code": "def hide_variables_window(self):\n if self.var_window is not None:\n self.var_window.window.destroy()\n self.var_window = None", - "docstring": "Hide the variables window" - }, - { - "code": "def _mouse_pointer_moved(self, x, y):\n self._namespace['MOUSEX'] = x\n self._namespace['MOUSEY'] = y", - "docstring": "GUI callback for mouse moved" - }, - { - "code": "def verify_signature(self, data):\n data = self._remove_magic(data)\n data = urlsafe_nopadding_b64decode(data)\n options = self._read_header(data)\n data = self._add_magic(data)\n self._unsign_data(data, options)", - "docstring": "Verify sealed data signature" - }, - { - "code": "def html_singleAll(self,template=\"basic\"):\n for fname in smartSort(self.cells):\n if template==\"fixed\":\n self.html_single_fixed(fname)\n else:\n self.html_single_basic(fname)", - "docstring": "generate a data view for every ABF in the project folder." - }, - { - "code": "def settings(self):\n stmt = \"select {fields} from pg_settings\".format(fields=', '.join(SETTINGS_FIELDS))\n settings = []\n for row in self._iter_results(stmt):\n row['setting'] = self._vartype_map[row['vartype']](row['setting'])\n settings.append(Settings(**row))\n return settings", - "docstring": "Returns settings from the server." - }, - { - "code": "def things_near(self, location, radius=None):\n \"Return all things within radius of location.\"\n if radius is None: radius = self.perceptible_distance\n radius2 = radius * radius\n return [thing for thing in self.things\n if distance2(location, thing.location) <= radius2]", - "docstring": "Return all things within radius of location." 
- }, - { - "code": "def _parse_args(args):\n parser = argparse.ArgumentParser(description=\"Remove and/or rearrange \"\n + \"sections from each line of a file(s).\",\n usage=_usage()[len('usage: '):])\n parser.add_argument('-b', \"--bytes\", action='store', type=lst, default=[],\n help=\"Bytes to select\")\n parser.add_argument('-c', \"--chars\", action='store', type=lst, default=[],\n help=\"Character to select\")\n parser.add_argument('-f', \"--fields\", action='store', type=lst, default=[],\n help=\"Fields to select\")\n parser.add_argument('-d', \"--delimiter\", action='store', default=\"\\t\",\n help=\"Sets field delimiter(default is TAB)\")\n parser.add_argument('-e', \"--regex\", action='store_true',\n help='Enable regular expressions to be used as input '+\n 'delimiter')\n parser.add_argument('-s', '--skip', action='store_true',\n help=\"Skip lines that do not contain input delimiter.\")\n parser.add_argument('-S', \"--separator\", action='store', default=\"\\t\",\n help=\"Sets field separator for output.\")\n parser.add_argument('file', nargs='*', default=\"-\",\n help=\"File(s) to cut\")\n return parser.parse_args(args)", - "docstring": "Setup argparser to process arguments and generate help" - }, - { - "code": "def _read_header(self, data):\n version = self._read_version(data)\n version_info = self._get_version_info(version)\n header_data = data[:version_info['header_size']]\n header = version_info['header']\n header = header._make(\n unpack(version_info['header_format'], header_data))\n header = dict(header._asdict())\n flags = list(\"{0:0>8b}\".format(header['flags']))\n flags = dict(version_info['flags']._make(flags)._asdict())\n flags = dict((i, bool(int(j))) for i, j in flags.iteritems())\n header['flags'] = flags\n timestamp = None\n if flags['timestamp']:\n ts_start = version_info['header_size']\n ts_end = ts_start + version_info['timestamp_size']\n timestamp_data = data[ts_start:ts_end]\n timestamp = unpack(\n version_info['timestamp_format'], timestamp_data)[0]\n header['info'] = {'timestamp': timestamp}\n return header", - "docstring": "Read header from data" - }, - { - "code": "def _mutagen_fields_to_single_value(metadata):\n\treturn dict((k, v[0]) for k, v in metadata.items() if v)", - "docstring": "Replace mutagen metadata field list values in mutagen tags with the first list value." - }, - { - "code": "def load(self):\n con = sqlite3.connect(self.tmp_cookie_file)\n cur = con.cursor()\n try:\n cur.execute('SELECT host_key, path, secure, expires_utc, name, value, encrypted_value '\n 'FROM cookies WHERE host_key like \"%{}%\";'.format(self.domain_name))\n except sqlite3.OperationalError:\n cur.execute('SELECT host_key, path, is_secure, expires_utc, name, value, encrypted_value '\n 'FROM cookies WHERE host_key like \"%{}%\";'.format(self.domain_name))\n cj = http.cookiejar.CookieJar()\n for item in cur.fetchall():\n host, path, secure, expires, name = item[:5]\n value = self._decrypt(item[5], item[6])\n c = create_cookie(host, path, secure, expires, name, value)\n cj.set_cookie(c)\n con.close()\n return cj", - "docstring": "Load sqlite cookies into a cookiejar" - }, - { - "code": "def find_bump(target, tag):\n tmp = tag.split(\".\")\n existing = [intify(basename(f)) for f in glob(join(target, \"[0-9]*.md\"))]\n latest = max(existing)\n if int(tmp[0]) > latest[0]:\n return \"major\"\n elif int(tmp[1]) > latest[1]:\n return \"minor\"\n else:\n return \"patch\"", - "docstring": "Identify the kind of release by comparing to existing ones." 
- }, - { - "code": "def _instructions_changed(self, change):\n if change.adds():\n for index, instruction in change.items():\n if isinstance(instruction, dict):\n in_row = self._parser.instruction_in_row(self, instruction)\n self.instructions[index] = in_row\n else:\n instruction.transfer_to_row(self)", - "docstring": "Call when there is a change in the instructions." - }, - { - "code": "def _recv(self) -> str:\n buf, left, right = self.__readline_and_count()\n bufl = [buf]\n while left != right:\n buf, l, r = self.__readline_and_count()\n bufl.append(buf)\n left += l\n right += r\n buf = ''.join(bufl).strip()\n logger.debug('<%s', buf)\n if '(error' in bufl[0]:\n raise Exception(f\"Error in smtlib: {bufl[0]}\")\n return buf", - "docstring": "Reads the response from the solver" - }, - { - "code": "def clubStaff(self):\n method = 'GET'\n url = 'club/stats/staff'\n rc = self.__request__(method, url)\n return rc", - "docstring": "Return staff in your club." - }, - { - "code": "def error(message):\n global parser\n print (_(\"Error: \") + message)\n print ()\n parser.print_help()\n sys.exit()", - "docstring": "Prints an error message, the help message and quits" - }, - { - "code": "def concat(a, b):\n \"Same as a + b, for a and b sequences.\"\n if not hasattr(a, '__getitem__'):\n msg = \"'%s' object can't be concatenated\" % type(a).__name__\n raise TypeError(msg)\n return a + b", - "docstring": "Same as a + b, for a and b sequences." - }, - { - "code": "def reflect(self, x0, y0, x, y):\n rx = x0 - (x-x0)\n ry = y0 - (y-y0)\n return rx, ry", - "docstring": "Reflects the point x, y through origin x0, y0." - }, - { - "code": "def pad_cells(table):\n col_sizes = [max(map(len, col)) for col in zip(*table)]\n for row in table:\n for cell_num, cell in enumerate(row):\n row[cell_num] = pad_to(cell, col_sizes[cell_num])\n return table", - "docstring": "Pad each cell to the size of the largest cell in its column." - }, - { - "code": "def authorize(self):\n response = self.client.login(username=self.USERNAME, \n password=self.PASSWORD)\n self.assertTrue(response)\n self.authed = True", - "docstring": "Authenticates the superuser account via the web login." - }, - { - "code": "def downbeat(annotation, sr=22050, length=None, **kwargs):\n beat_click = mkclick(440 * 2, sr=sr)\n downbeat_click = mkclick(440 * 3, sr=sr)\n intervals, values = annotation.to_interval_values()\n beats, downbeats = [], []\n for time, value in zip(intervals[:, 0], values):\n if value['position'] == 1:\n downbeats.append(time)\n else:\n beats.append(time)\n if length is None:\n length = int(sr * np.max(intervals)) + len(beat_click) + 1\n y = filter_kwargs(mir_eval.sonify.clicks,\n np.asarray(beats),\n fs=sr, length=length, click=beat_click)\n y += filter_kwargs(mir_eval.sonify.clicks,\n np.asarray(downbeats),\n fs=sr, length=length, click=downbeat_click)\n return y", - "docstring": "Sonify beats and downbeats together." - }, - { - "code": "def all_events(cls):\n all_evts = set()\n for cls, evts in cls.__all_events__.items():\n all_evts.update(evts)\n return all_evts", - "docstring": "Return all events that all subclasses have so far registered to publish." 
- }, - { - "code": "def _ensure_started(self):\n if self._process and self._process.poll() is None:\n return\n if not getattr(self, \"_cmd\"):\n raise RuntimeError(\"Player command is not configured\")\n log.debug(\"Starting playback command: %r\", self._cmd)\n self._process = SilentPopen(self._cmd)\n self._post_start()", - "docstring": "Ensure player backing process is started" - }, - { - "code": "def _get_filtered_mounts(mounts, mount_param_type):\n return set([mount for mount in mounts if isinstance(mount, mount_param_type)])", - "docstring": "Helper function to return an appropriate set of mount parameters." - }, - { - "code": "def from_spec(spec, kwargs):\n agent = util.get_object(\n obj=spec,\n predefined_objects=tensorforce.agents.agents,\n kwargs=kwargs\n )\n assert isinstance(agent, Agent)\n return agent", - "docstring": "Creates an agent from a specification dict." - }, - { - "code": "def _merge(options, name, bases, default=None):\n result = None\n for base in bases:\n if base is None:\n continue\n value = getattr(base, name, None)\n if value is None:\n continue\n result = utils.cons(result, value)\n value = options.get(name)\n if value is not None:\n result = utils.cons(result, value)\n return result or default", - "docstring": "Merges a named option collection." - }, - { - "code": "def calculate_score(self, node: BaseEntity) -> float:\n score = (\n self.graph.nodes[node][self.tag]\n if self.tag in self.graph.nodes[node] else\n self.default_score\n )\n for predecessor, _, d in self.graph.in_edges(node, data=True):\n if d[RELATION] in CAUSAL_INCREASE_RELATIONS:\n score += self.graph.nodes[predecessor][self.tag]\n elif d[RELATION] in CAUSAL_DECREASE_RELATIONS:\n score -= self.graph.nodes[predecessor][self.tag]\n return score", - "docstring": "Calculate the new score of the given node." - }, - { - "code": "def _iterate_namespace_models(self, **kwargs) -> Iterable:\n return tqdm(\n self._get_query(self.namespace_model),\n total=self._count_model(self.namespace_model),\n **kwargs\n )", - "docstring": "Return an iterator over the models to be converted to the namespace." 
- }, - { - "code": "def createcolorbar(cmap, norm):\n cax, kw = matplotlib.colorbar.make_axes(matplotlib.pyplot.gca())\n c = matplotlib.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)\n return c", - "docstring": "Create a colourbar with limits of lwr and upr" - }, - { - "code": "def validate_date(date_text):\n try:\n if int(date_text) < 0:\n return True\n except ValueError:\n pass\n try:\n datetime.strptime(date_text, '%Y-%m-%d')\n return True\n except ValueError:\n pass\n raise ValueError('Dates must be negative integers or YYYY-MM-DD in the past.')", - "docstring": "Return True if valid, raise ValueError if not" - }, - { - "code": "def nmap(nmap_args, ips):\n config = Config()\n arguments = ['nmap', '-Pn']\n arguments.extend(ips)\n arguments.extend(nmap_args)\n output_file = ''\n now = datetime.datetime.now()\n if not '-oA' in nmap_args:\n output_name = 'nmap_jackal_{}'.format(now.strftime(\"%Y-%m-%d %H:%M\"))\n path_name = os.path.join(config.get('nmap', 'directory'), output_name)\n print_notification(\"Writing output of nmap to {}\".format(path_name))\n if not os.path.exists(config.get('nmap', 'directory')):\n os.makedirs(config.get('nmap', 'directory'))\n output_file = path_name + '.xml'\n arguments.extend(['-oA', path_name])\n else:\n output_file = nmap_args[nmap_args.index('-oA') + 1] + '.xml'\n print_notification(\"Starting nmap\")\n subprocess.call(arguments)\n with open(output_file, 'r') as f:\n return f.read()", - "docstring": "Start an nmap process with the given args on the given ips." - }, - { - "code": "def merge(*args):\n ret = []\n for arg in args:\n if isinstance(arg, list) or isinstance(arg, tuple):\n ret += list(arg)\n else:\n ret.append(arg)\n return ret", - "docstring": "Implements the 'merge' operator for merging lists." - }, - { - "code": "def make_sentence (sent_text):\n lex = []\n idx = 0\n for word in sent_text:\n if len(word) > 0:\n if (idx > 0) and not (word[0] in \",.:;!?-\\\"'\"):\n lex.append(\" \")\n lex.append(word)\n idx += 1\n return \"\".join(lex)", - "docstring": "construct a sentence text, with proper spacing" - }, - { - "code": "def publish(context):\n header('Recording changes...')\n run('git add -A')\n header('Displaying changes...')\n run('git -c color.status=always status')\n if not click.confirm('\\nContinue publishing'):\n run('git reset HEAD --')\n abort(context)\n header('Saving changes...')\n try:\n run('git commit -m \"{message}\"'.format(\n message='Publishing {}'.format(choose_commit_emoji())\n ), capture=True)\n except subprocess.CalledProcessError as e:\n if 'nothing to commit' not in e.stdout:\n raise\n else:\n click.echo('Nothing to commit.')\n header('Pushing to GitHub...')\n branch = get_branch()\n run('git push origin {branch}:{branch}'.format(branch=branch))\n pr_link = get_pr_link(branch)\n if pr_link:\n click.launch(pr_link)", - "docstring": "Saves changes and sends them to GitHub" - }, - { - "code": "def unmap_memory_callback(self, start, size):\n logger.info(f\"Unmapping memory from {hex(start)} to {hex(start + size)}\")\n mask = (1 << 12) - 1\n if (start & mask) != 0:\n logger.error(\"Memory to be unmapped is not aligned to a page\")\n if (size & mask) != 0:\n size = ((size >> 12) + 1) << 12\n logger.warning(\"Forcing unmap size to align to a page\")\n self._emu.mem_unmap(start, size)", - "docstring": "Unmap Unicorn maps when Manticore unmaps them" - }, - { - "code": "def add_path(self, nodes, **attr):\n if nx.__version__[0] == \"1\":\n return super().add_path(nodes, **attr)\n else:\n return nx.add_path(self, nodes, 
**attr)", - "docstring": "In replacement for Deprecated add_path method" - }, - { - "code": "def make_undirected(self):\n \"Make a digraph into an undirected graph by adding symmetric edges.\"\n for a in self.dict.keys():\n for (b, distance) in self.dict[a].items():\n self.connect1(b, a, distance)", - "docstring": "Make a digraph into an undirected graph by adding symmetric edges." - }, - { - "code": "def answer_display(self, s=''):\n padding = len(max(self.questions.keys(), key=len)) + 5\n for key in list(self.answers.keys()):\n s += '{:>{}} : {}\\n'.format(key, padding, self.answers[key])\n return s", - "docstring": "Helper method for displaying the answers so far." - }, - { - "code": "def stop(self):\n with self._status_lock:\n if self._running:\n assert self._observer is not None\n self._observer.stop()\n self._running = False\n self._origin_mapped_data = dict()", - "docstring": "Stops monitoring the predefined directory." - }, - { - "code": "def run_global_hook(hook_name, *args):\n hook_finder = HookFinder(get_global_hook_path())\n hook = hook_finder(hook_name)\n if hook:\n hook.run(*args)", - "docstring": "Attempt to run a global hook by name with args" - }, - { - "code": "def single(method):\n @functools.wraps(method)\n def single(self, address, value=None):\n address = urllib.parse.unquote_plus(address)\n try:\n error = NO_PROJECT_ERROR\n if not self.project:\n raise ValueError\n error = BAD_ADDRESS_ERROR\n ed = editor.Editor(address, self.project)\n if value is None:\n error = BAD_GETTER_ERROR\n result = method(self, ed)\n else:\n error = BAD_SETTER_ERROR\n result = method(self, ed, value)\n result = {'value': result}\n except Exception as e:\n traceback.print_exc()\n msg = '%s\\n%s' % (error.format(**locals()), e)\n result = {'error': msg}\n return flask.jsonify(result)\n return single", - "docstring": "Decorator for RestServer methods that take a single address" - }, - { - "code": "def _escape_char(c, escape_char=ESCAPE_CHAR):\n buf = []\n for byte in c.encode('utf8'):\n buf.append(escape_char)\n buf.append('%X' % _ord(byte))\n return ''.join(buf)", - "docstring": "Escape a single character" - }, - { - "code": "def update_item(filename, item, uuid):\n with atomic_write(os.fsencode(str(filename))) as temp_file:\n with open(os.fsencode(str(filename))) as products_file:\n products_data = json.load(products_file)\n if 'products' in products_data[-1]:\n [products_data[i][\"products\"][0].update(item) for (\n i, j) in enumerate(products_data) if j[\"uuid\"] == str(uuid)]\n else:\n [products_data[i].update(item) for (i, j) in enumerate(\n products_data) if j[\"uuid\"] == str(uuid)]\n json.dump(products_data, temp_file)\n return True", - "docstring": "Update entry by UUID in the JSON file" - }, - { - "code": "def merge_text(events):\n text = []\n for obj in events:\n if obj['type'] == TEXT:\n text.append(obj['text'])\n else:\n if text:\n yield {'type': TEXT, 'text': ''.join(text)}\n text.clear()\n yield obj\n if text:\n yield {'type': TEXT, 'text': ''.join(text)}", - "docstring": "merges each run of successive text events into one text event" - }, - { - "code": "def format_parameters(self, **kwargs):\n req_data = {}\n for k, v in kwargs.items():\n if isinstance(v, (list, tuple)):\n k = k + '[]'\n req_data[k] = v\n return req_data", - "docstring": "Properly formats array types" - }, - { - "code": "def create_n_gram_df(df, n_pad):\n n_pad_2 = int((n_pad - 1)/2)\n for i in range(n_pad_2):\n df['char-{}'.format(i+1)] = df['char'].shift(i + 1)\n df['type-{}'.format(i+1)] = df['type'].shift(i 
+ 1)\n df['char{}'.format(i+1)] = df['char'].shift(-i - 1)\n df['type{}'.format(i+1)] = df['type'].shift(-i - 1)\n return df[n_pad_2: -n_pad_2]", - "docstring": "Given input dataframe, create feature dataframe of shifted characters" - }, - { - "code": "def check_important_sub_metrics(self, sub_metric):\n if not self.important_sub_metrics:\n return False\n if sub_metric in self.important_sub_metrics:\n return True\n items = sub_metric.split('.')\n if items[-1] in self.important_sub_metrics:\n return True\n return False", - "docstring": "check whether the given sub metric is in important_sub_metrics list" - }, - { - "code": "def disable():\n root = platform.config_root()\n try:\n os.makedirs(root)\n except OSError:\n pass\n filename = os.path.join(root, 'keyringrc.cfg')\n if os.path.exists(filename):\n msg = \"Refusing to overwrite {filename}\".format(**locals())\n raise RuntimeError(msg)\n with open(filename, 'w') as file:\n file.write('[backend]\\ndefault-keyring=keyring.backends.null.Keyring')", - "docstring": "Configure the null keyring as the default." - }, - { - "code": "def __execute_initial_load(self):\n if self.conf['phases']['panels']:\n tasks_cls = [TaskPanels, TaskPanelsMenu]\n self.execute_tasks(tasks_cls)\n if self.conf['phases']['identities']:\n tasks_cls = [TaskInitSortingHat]\n self.execute_tasks(tasks_cls)\n logger.info(\"Loading projects\")\n tasks_cls = [TaskProjects]\n self.execute_tasks(tasks_cls)\n logger.info(\"Done\")\n return", - "docstring": "Tasks that should be done just one time" - }, - { - "code": "def populations_slices(particles, num_pop_list):\n slices = []\n i_prev = 0\n for num_pop in num_pop_list:\n slices.append(slice(i_prev, i_prev + num_pop))\n i_prev += num_pop\n return slices", - "docstring": "2-tuple of slices for selection of two populations." - }, - { - "code": "def generate(request):\n models.DataItem.create(\n content=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(20))\n )\n return muffin.HTTPFound('/')", - "docstring": "Create a new DataItem." - }, - { - "code": "def _apply_record_predicates(self, i, r,\n summarize=False,\n report_unexpected_exceptions=True,\n context=None):\n for predicate, code, message, modulus in self._record_predicates:\n if i % modulus == 0:\n rdict = self._as_dict(r)\n try:\n valid = predicate(rdict)\n if not valid:\n p = {'code': code}\n if not summarize:\n p['message'] = message\n p['row'] = i + 1\n p['record'] = r\n if context is not None: p['context'] = context\n yield p\n except Exception as e:\n if report_unexpected_exceptions:\n p = {'code': UNEXPECTED_EXCEPTION}\n if not summarize:\n p['message'] = MESSAGES[UNEXPECTED_EXCEPTION] % (e.__class__.__name__, e)\n p['row'] = i + 1\n p['record'] = r\n p['exception'] = e\n p['function'] = '%s: %s' % (predicate.__name__,\n predicate.__doc__)\n if context is not None: p['context'] = context\n yield p", - "docstring": "Apply record predicates on `r`." 
- }, - { - "code": "def __parse_parms(self):\n args = list()\n for key, val in self.__parm.items():\n key = key.replace(\"FIO_\", \"\").lower()\n if key == \"runtime\":\n args.append(\"--time_based\")\n if val is None:\n args.append(\"--%s\" % key)\n else:\n args.append(\"--%s=%s\" % (key, val))\n return args", - "docstring": "Translate dict parameters to string" - }, - { - "code": "async def play(self, track_index: int = 0, ignore_shuffle: bool = False):\r\n if self.repeat and self.current:\r\n self.queue.append(self.current)\r\n self.previous = self.current\r\n self.current = None\r\n self.position = 0\r\n self.paused = False\r\n if not self.queue:\r\n await self.stop()\r\n await self._lavalink.dispatch_event(QueueEndEvent(self))\r\n else:\r\n if self.shuffle and not ignore_shuffle:\r\n track = self.queue.pop(randrange(len(self.queue)))\r\n else:\r\n track = self.queue.pop(min(track_index, len(self.queue) - 1))\r\n self.current = track\r\n await self._lavalink.ws.send(op='play', guildId=self.guild_id, track=track.track)\r\n await self._lavalink.dispatch_event(TrackStartEvent(self, track))", - "docstring": "Plays the first track in the queue, if any or plays a track from the specified index in the queue." - }, - { - "code": "def write_output_metadata(output_params):\n if \"path\" in output_params:\n metadata_path = os.path.join(output_params[\"path\"], \"metadata.json\")\n logger.debug(\"check for output %s\", metadata_path)\n try:\n existing_params = read_output_metadata(metadata_path)\n logger.debug(\"%s exists\", metadata_path)\n logger.debug(\"existing output parameters: %s\", pformat(existing_params))\n existing_tp = existing_params[\"pyramid\"]\n current_params = params_to_dump(output_params)\n logger.debug(\"current output parameters: %s\", pformat(current_params))\n current_tp = BufferedTilePyramid(**current_params[\"pyramid\"])\n if existing_tp != current_tp:\n raise MapcheteConfigError(\n \"pyramid definitions between existing and new output do not match: \"\n \"%s != %s\" % (existing_tp, current_tp)\n )\n existing_format = existing_params[\"driver\"][\"format\"]\n current_format = current_params[\"driver\"][\"format\"]\n if existing_format != current_format:\n raise MapcheteConfigError(\n \"existing output format does not match new output format: \"\n \"%s != %s\" % (\n (existing_format, current_format)\n )\n )\n except FileNotFoundError:\n logger.debug(\"%s does not exist\", metadata_path)\n dump_params = params_to_dump(output_params)\n write_json(metadata_path, dump_params)\n else:\n logger.debug(\"no path parameter found\")", - "docstring": "Dump output JSON and verify parameters if output metadata exist." 
- }, - { - "code": "def _add_out_streams(self, spbl):\n if self.outputs is None:\n return\n output_map = self._sanitize_outputs()\n for stream_id, out_fields in output_map.items():\n out_stream = spbl.outputs.add()\n out_stream.stream.CopyFrom(self._get_stream_id(self.name, stream_id))\n out_stream.schema.CopyFrom(self._get_stream_schema(out_fields))", - "docstring": "Adds outputs to a given protobuf Bolt or Spout message" - }, - { - "code": "def send_buffered_messages(self):\n while not self.out_stream.is_empty() and self._stmgr_client.is_registered:\n tuple_set = self.out_stream.poll()\n if isinstance(tuple_set, tuple_pb2.HeronTupleSet):\n tuple_set.src_task_id = self.my_pplan_helper.my_task_id\n self.gateway_metrics.update_sent_packet(tuple_set.ByteSize())\n self._stmgr_client.send_message(tuple_set)", - "docstring": "Send messages in out_stream to the Stream Manager" - }, - { - "code": "def distribute_package(roles, cl_args):\n Log.info(\"Distributing heron package to nodes (this might take a while)...\")\n masters = roles[Role.MASTERS]\n slaves = roles[Role.SLAVES]\n tar_file = tempfile.NamedTemporaryFile(suffix=\".tmp\").name\n Log.debug(\"TAR file %s to %s\" % (cl_args[\"heron_dir\"], tar_file))\n make_tarfile(tar_file, cl_args[\"heron_dir\"])\n dist_nodes = masters.union(slaves)\n scp_package(tar_file, dist_nodes, cl_args)", - "docstring": "distribute Heron packages to all nodes" - }, - { - "code": "def strings_in_dictionary(dictionary):\n strings = [value for value in six.itervalues(dictionary) if not isinstance(value, dict)]\n for child_dict in [dv for dv in six.itervalues(dictionary) if isinstance(dv, dict)]:\n strings.extend(SearchResultProcessor.strings_in_dictionary(child_dict))\n return strings", - "docstring": "Used by default implementation for finding excerpt" - }, - { - "code": "def pythonize_arguments(arg_str):\n out_args = []\n if arg_str is None:\n return out_str\n args = arg_str.split(',')\n for arg in args:\n components = arg.split('=')\n name_and_type=components[0].split(' ')\n if name_and_type[-1]=='' and len(name_and_type)>1:\n name=name_and_type[-2]\n else:\n name=name_and_type[-1]\n if len(components)>1:\n name+='='+components[1]\n out_args.append(name)\n return ','.join(out_args)", - "docstring": "Remove types from function arguments in cython" - }, - { - "code": "def leave1out(learner, dataset):\n \"Leave one out cross-validation over the dataset.\"\n return cross_validation(learner, dataset, k=len(dataset.examples))", - "docstring": "Leave one out cross-validation over the dataset." - }, - { - "code": "def send_duplicate_notification(self):\n email_utils.send_email(\n from_email=settings.DEFAULT_FROM_EMAIL,\n recipient_list=[self.email],\n subject=_(\"Registration Attempt\"),\n template_name=\"rest_email_auth/emails/duplicate-email\",\n )\n logger.info(\"Sent duplicate email notification to: %s\", self.email)", - "docstring": "Send a notification about a duplicate signup." - }, - { - "code": "def annotate_metadata_dependencies(repo):\n options = repo.options\n if 'dependencies' not in options:\n print(\"No dependencies\")\n return []\n repos = []\n dependent_repos = options['dependencies']\n for d in dependent_repos:\n if \"/\" not in d:\n print(\"Invalid dependency specification\")\n (username, reponame) = d.split(\"/\")\n try:\n repos.append(repo.manager.lookup(username, reponame))\n except:\n print(\"Repository does not exist. 
Please create one\", d)\n package = repo.package\n package['dependencies'] = []\n for r in repos:\n package['dependencies'].append({\n 'username': r.username,\n 'reponame': r.reponame,\n })", - "docstring": "Collect information from the dependent repo's" - }, - { - "code": "def _get_mutagen_metadata(filepath):\n\ttry:\n\t\tmetadata = mutagen.File(filepath, easy=True)\n\texcept mutagen.MutagenError:\n\t\tlogger.warning(\"Can't load {} as music file.\".format(filepath))\n\t\traise\n\treturn metadata", - "docstring": "Get mutagen metadata dict from a file." - }, - { - "code": "def load_package(self):\n try:\n self.package = importlib.import_module(self.name)\n except ModuleNotFoundError:\n raise ModuleNotFoundError(\"Effect package '{}' not found.\".format(self.name))", - "docstring": "FInd the effect package" - }, - { - "code": "def resolve_domains(domains, disable_zone=False):\n dnsresolver = dns.resolver.Resolver()\n ips = []\n for domain in domains:\n print_notification(\"Resolving {}\".format(domain))\n try:\n result = dnsresolver.query(domain, 'A')\n for a in result.response.answer[0]:\n ips.append(str(a))\n if not disable_zone:\n ips.extend(zone_transfer(str(a), domain))\n except dns.resolver.NXDOMAIN as e:\n print_error(e)\n return ips", - "docstring": "Resolves the list of domains and returns the ips." - }, - { - "code": "def width(self):\n if len(self.coords) <= 1:\n return 0\n return np.max(self.xx) - np.min(self.xx)", - "docstring": "Get the width of a bounding box encapsulating the line." - }, - { - "code": "def heartbeat(self):\n heartbeat = (HEARTBEAT_CODE).to_bytes(4, \"little\")\n r = self.task_incoming.send(heartbeat)\n logger.debug(\"Return from heartbeat : {}\".format(r))", - "docstring": "Send heartbeat to the incoming task queue" - }, - { - "code": "def ipy_notebook_skeleton():\n py_version = sys.version_info\n notebook_skeleton = {\n \"cells\": [],\n \"metadata\": {\n \"kernelspec\": {\n \"display_name\": \"Python \" + str(py_version[0]),\n \"language\": \"python\",\n \"name\": \"python\" + str(py_version[0])\n },\n \"language_info\": {\n \"codemirror_mode\": {\n \"name\": \"ipython\",\n \"version\": py_version[0]\n },\n \"file_extension\": \".py\",\n \"mimetype\": \"text/x-python\",\n \"name\": \"python\",\n \"nbconvert_exporter\": \"python\",\n \"pygments_lexer\": \"ipython\" + str(py_version[0]),\n \"version\": '{0}.{1}.{2}'.format(*sys.version_info[:3])\n }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 0\n }\n return notebook_skeleton", - "docstring": "Returns a dictionary with the elements of a Jupyter notebook" - }, - { - "code": "def iso_reference_valid_char(c, raise_error=True):\n if c in ISO_REFERENCE_VALID:\n return True\n if raise_error:\n raise ValueError(\"'%s' is not in '%s'\" % (c, ISO_REFERENCE_VALID))\n return False", - "docstring": "Helper to make sure the given character is valid for a reference number" - }, - { - "code": "def search(self):\n try:\n filters = json.loads(self.query)\n except ValueError:\n return False\n result = self.model_query\n if 'filter'in filters.keys():\n result = self.parse_filter(filters['filter'])\n if 'sort'in filters.keys():\n result = result.order_by(*self.sort(filters['sort']))\n return result", - "docstring": "This is the most important method" - }, - { - "code": "def _MessageToJsonObject(message, including_default_value_fields):\n message_descriptor = message.DESCRIPTOR\n full_name = message_descriptor.full_name\n if _IsWrapperMessage(message_descriptor):\n return _WrapperMessageToJsonObject(message)\n if full_name in 
_WKTJSONMETHODS:\n return _WKTJSONMETHODS[full_name][0](\n message, including_default_value_fields)\n js = {}\n return _RegularMessageToJsonObject(\n message, js, including_default_value_fields)", - "docstring": "Converts message to an object according to Proto3 JSON Specification." - }, - { - "code": "def printParameters(self):\n print \"numberOfCols=\", self.numberOfCols\n print \"cellsPerColumn=\", self.cellsPerColumn\n print \"minThreshold=\", self.minThreshold\n print \"newSynapseCount=\", self.newSynapseCount\n print \"activationThreshold=\", self.activationThreshold\n print\n print \"initialPerm=\", self.initialPerm\n print \"connectedPerm=\", self.connectedPerm\n print \"permanenceInc=\", self.permanenceInc\n print \"permanenceDec=\", self.permanenceDec\n print \"permanenceMax=\", self.permanenceMax\n print \"globalDecay=\", self.globalDecay\n print\n print \"doPooling=\", self.doPooling\n print \"segUpdateValidDuration=\", self.segUpdateValidDuration\n print \"pamLength=\", self.pamLength", - "docstring": "Print the parameter settings for the TM." - }, - { - "code": "def parse_num(source, start, charset):\n while start < len(source) and source[start] in charset:\n start += 1\n return start", - "docstring": "Returns the first index>=start of char not in charset" - }, - { - "code": "def _check_field_value(field_value, pattern):\n\tif isinstance(field_value, list):\n\t\treturn any(re.search(pattern, str(value), re.I) for value in field_value)\n\telse:\n\t\treturn re.search(pattern, str(field_value), re.I)", - "docstring": "Check a song metadata field value for a pattern." - }, - { - "code": "def uniform_noise(points):\n return np.random.rand(1) * np.random.uniform(points, 1) \\\n + random.sample([2, -2], 1)", - "docstring": "Init a uniform noise variable." - }, - { - "code": "def whitelist(self, account):\n assert callable(self.blockchain.account_whitelist)\n return self.blockchain.account_whitelist(account, lists=[\"white\"], account=self)", - "docstring": "Add another account to the whitelist of this account" - }, - { - "code": "def validate_bucket_name(bucket):\n if not bucket.startswith('gs://'):\n raise ValueError(\n 'Invalid bucket path \"%s\". Must start with \"gs://\".' % bucket)\n bucket_name = bucket[len('gs://'):]\n if not re.search(r'^\\w[\\w_\\.-]{1,61}\\w$', bucket_name):\n raise ValueError('Invalid bucket name: %s' % bucket)", - "docstring": "Validate that the name is a valid GCS bucket." - }, - { - "code": "def edit(self, id, *args, **kw):\n if args and kw: \n return\n if args and type(args[0]) == dict:\n fields = [k for k in args[0]]\n v = [args[0][k] for k in args[0]]\n if kw:\n fields = [k for k in kw]\n v = [kw[k] for k in kw]\n sql = \"update \"+self._name+\" set \"+\"=?, \".join(fields)+\"=? where \"+self._key+\"=\"+unicode(id)\n self._db._cur.execute(sql, v)\n self._db._i += 1\n if self._db._i >= self._db._commit:\n self._db._i = 0\n self._db._con.commit()", - "docstring": "Edits the row with given id." 
- }, - { - "code": "def upgrade(self):\n assert callable(self.blockchain.upgrade_account)\n return self.blockchain.upgrade_account(account=self)", - "docstring": "Upgrade account to life time member" - }, - { - "code": "def spawn(self, generations):\n egg_donors = [god for god in self.gods.values() if god.chromosomes == 'XX']\n sperm_donors = [god for god in self.gods.values() if god.chromosomes == 'XY']\n for i in range(generations):\n print(\"\\nGENERATION %d\\n\" % (i+1))\n gen_xx = []\n gen_xy = []\n for egg_donor in egg_donors:\n sperm_donor = random.choice(sperm_donors)\n brood = self.breed(egg_donor, sperm_donor)\n for child in brood:\n if child.divinity > human:\n self.add_god(child)\n if child.chromosomes == 'XX':\n gen_xx.append(child)\n else:\n gen_xy.append(child)\n egg_donors = [ed for ed in egg_donors if ed.generation > (i-2)]\n sperm_donors = [sd for sd in sperm_donors if sd.generation > (i-3)]\n egg_donors += gen_xx\n sperm_donors += gen_xy", - "docstring": "Grow this Pantheon by multiplying Gods." - }, - { - "code": "def _format_task_uri(fmt, job_metadata, task_metadata):\n values = {\n 'job-id': None,\n 'task-id': 'task',\n 'job-name': None,\n 'user-id': None,\n 'task-attempt': None\n }\n for key in values:\n values[key] = task_metadata.get(key) or job_metadata.get(key) or values[key]\n return fmt.format(**values)", - "docstring": "Returns a URI with placeholders replaced by metadata values." - }, - { - "code": "def refresh(self, token, timeout=None):\n if timeout is None:\n timeout = LockManager.LOCK_TIME_OUT_DEFAULT\n return self.storage.refresh(token, timeout)", - "docstring": "Set new timeout for lock, if existing and valid." - }, - { - "code": "def _reportCommandLineUsageErrorAndExit(parser, message):\n print parser.get_usage()\n print message\n sys.exit(1)", - "docstring": "Report usage error and exit program with error indication." - }, - { - "code": "def getVersion():\n with open(os.path.join(REPO_DIR, \"VERSION\"), \"r\") as versionFile:\n return versionFile.read().strip()", - "docstring": "Get version from local file." - }, - { - "code": "def _get_default_namespace(self) -> Optional[Namespace]:\n return self._get_query(Namespace).filter(Namespace.url == self._get_namespace_url()).one_or_none()", - "docstring": "Get the reference BEL namespace if it exists." - }, - { - "code": "def update_function(self, param_vals):\n self.opt_obj.update_function(param_vals)\n return self.opt_obj.get_error()", - "docstring": "Updates the opt_obj, returns new error." - }, - { - "code": "def check_solver_status(status, raise_error=False):\n if status == OPTIMAL:\n return\n elif (status in has_primals) and not raise_error:\n warn(\"solver status is '{}'\".format(status), UserWarning)\n elif status is None:\n raise OptimizationError(\n \"model was not optimized yet or solver context switched\")\n else:\n raise OptimizationError(\"solver status is '{}'\".format(status))", - "docstring": "Perform standard checks on a solver's status." 
- }, - { - "code": "def storealleles(consens, hidx, alleles):\n bigbase = PRIORITY[consens[hidx[0]]]\n bigallele = [i for i in alleles if i[0] == bigbase][0]\n for hsite, pbase in zip(hidx[1:], bigallele[1:]):\n if PRIORITY[consens[hsite]] != pbase:\n consens[hsite] = consens[hsite].lower()\n return consens", - "docstring": "store phased allele data for diploids" - }, - { - "code": "def _parse_gcs_uri(self, raw_uri):\n raw_uri = directory_fmt(raw_uri)\n _, docker_path = _gcs_uri_rewriter(raw_uri)\n docker_uri = os.path.join(self._relative_path, docker_path)\n return docker_uri", - "docstring": "Return a valid docker_path for a GCS bucket." - }, - { - "code": "def unique(list):\n unique = []; [unique.append(x) for x in list if x not in unique]\n return unique", - "docstring": "Returns a copy of the list without duplicates." - }, - { - "code": "def log_update(entity, update):\n p = {'on': entity, 'update': update}\n _log(TYPE_CODES.UPDATE, p)", - "docstring": "Logs an update done on an entity" - }, - { - "code": "def do_unfullscreen(self, widget):\n self.unfullscreen()\n self.is_fullscreen = False\n self.bot._screen_ratio = None", - "docstring": "Widget Action to set Windowed Mode." - }, - { - "code": "def crown(self, depth=2):\n nodes = []\n for node in self.leaves: nodes += node.flatten(depth-1)\n return cluster.unique(nodes)", - "docstring": "Returns a list of leaves, nodes connected to leaves, etc." - }, - { - "code": "def _get_healthmgr_cmd(self):\n healthmgr_main_class = 'org.apache.heron.healthmgr.HealthManager'\n healthmgr_cmd = [os.path.join(self.heron_java_home, 'bin/java'),\n '-Xmx1024M',\n '-XX:+PrintCommandLineFlags',\n '-verbosegc',\n '-XX:+PrintGCDetails',\n '-XX:+PrintGCTimeStamps',\n '-XX:+PrintGCDateStamps',\n '-XX:+PrintGCCause',\n '-XX:+UseGCLogFileRotation',\n '-XX:NumberOfGCLogFiles=5',\n '-XX:GCLogFileSize=100M',\n '-XX:+PrintPromotionFailure',\n '-XX:+PrintTenuringDistribution',\n '-XX:+PrintHeapAtGC',\n '-XX:+HeapDumpOnOutOfMemoryError',\n '-XX:+UseConcMarkSweepGC',\n '-XX:+PrintCommandLineFlags',\n '-Xloggc:log-files/gc.healthmgr.log',\n '-Djava.net.preferIPv4Stack=true',\n '-cp', self.health_manager_classpath,\n healthmgr_main_class,\n \"--cluster\", self.cluster,\n \"--role\", self.role,\n \"--environment\", self.environment,\n \"--topology_name\", self.topology_name,\n \"--metricsmgr_port\", self.metrics_manager_port]\n return Command(healthmgr_cmd, self.shell_env)", - "docstring": "get the command to start the topology health manager processes" - }, - { - "code": "def from_file(cls, filename):\n with open(filename, \"r\") as pem_file:\n data = pem.readPemFromFile(pem_file)\n return cls.from_der_data(data)", - "docstring": "Load certificate from a file." - }, - { - "code": "def get(self, list_id, segment_id):\n return self._mc_client._get(url=self._build_path(list_id, 'segments', segment_id))", - "docstring": "returns the specified list segment." - }, - { - "code": "def removeAccount(self, account):\n accounts = self.getAccounts()\n for a in accounts:\n if a[\"name\"] == account:\n self.store.delete(a[\"pubkey\"])", - "docstring": "Remove all keys associated with a given account" - }, - { - "code": "def getinfo(self, name):\n rarinfo = self.NameToInfo.get(name)\n if rarinfo is None:\n raise KeyError('There is no item named %r in the archive' % name)\n return rarinfo", - "docstring": "Return the instance of RarInfo given 'name'." 
- }, - { - "code": "def enable_FTDI_driver():\n logger.debug('Enabling FTDI driver.')\n if sys.platform == 'darwin':\n logger.debug('Detected Mac OSX')\n _check_running_as_root()\n subprocess.check_call('kextload -b com.apple.driver.AppleUSBFTDI', shell=True)\n subprocess.check_call('kextload /System/Library/Extensions/FTDIUSBSerialDriver.kext', shell=True)\n elif sys.platform.startswith('linux'):\n logger.debug('Detected Linux')\n _check_running_as_root()\n subprocess.check_call('modprobe -q ftdi_sio', shell=True)\n subprocess.check_call('modprobe -q usbserial', shell=True)", - "docstring": "Re-enable the FTDI drivers for the current platform." - }, - { - "code": "def operation_list(uploader):\n files = uploader.file_list()\n for f in files:\n log.info(\"{file:30s} {size}\".format(file=f[0], size=f[1]))", - "docstring": "List file on target" - }, - { - "code": "def validate_response(expected_responses):\n def internal_decorator(function):\n @wraps(function)\n async def wrapper(*args, **kwargs):\n response = await function(*args, **kwargs)\n for expected_response in expected_responses:\n if response.startswith(expected_response):\n return response\n raise QRTCommandException(\n \"Expected %s but got %s\" % (expected_responses, response)\n )\n return wrapper\n return internal_decorator", - "docstring": "Decorator to validate responses from QTM" - }, - { - "code": "def nconflicts(self, var, val, assignment):\n \"Return the number of conflicts var=val has with other variables.\"\n def conflict(var2):\n return (var2 in assignment\n and not self.constraints(var, val, var2, assignment[var2]))\n return count_if(conflict, self.neighbors[var])", - "docstring": "Return the number of conflicts var=val has with other variables." - }, - { - "code": "def build_and_submit(self):\n class_dict = self._construct_topo_class_dict()\n topo_cls = TopologyType(self.topology_name, (Topology,), class_dict)\n topo_cls.write()", - "docstring": "Builds the topology and submits to the destination" - }, - { - "code": "def headerHTML(header,fname):\n html=\"\"\n html+=\"

%s

\"%(fname)\n html+=pprint.pformat(header, indent=1)\n html=html.replace(\"\\n\",'
').replace(\" \",\" \")\n html=html.replace(r\"\\x00\",\"\")\n html+=\"
\"\n print(\"saving header file:\",fname)\n f=open(fname,'w')\n f.write(html)\n f.close()\n webbrowser.open(fname)", - "docstring": "given the bytestring ABF header, make and launch HTML." - }, - { - "code": "def _default(self):\n try:\n iter(self.default)\n except TypeError:\n return repr(self.default)\n for v in self.default:\n if isinstance(v, Unparseable):\n default = self._default_value_only()\n if default:\n return default\n return ', '.join(str(v) for v in self.default)", - "docstring": "Return the default argument, formatted nicely." - }, - { - "code": "async def sync_all_new_events(self, sync_all_new_events_request):\n response = hangouts_pb2.SyncAllNewEventsResponse()\n await self._pb_request('conversations/syncallnewevents',\n sync_all_new_events_request, response)\n return response", - "docstring": "List all events occurring at or after a timestamp." - }, - { - "code": "def to_decimal(text):\n if not isinstance(text, string_type):\n raise TypeError(\"expected str or unicode, %s given\" % type(text))\n if findall(r\"[\\x00-\\x20\\x7c-\\xff]\", text):\n raise ValueError(\"invalid character in sequence\")\n text = text.lstrip('!')\n decimal = 0\n length = len(text) - 1\n for i, char in enumerate(text):\n decimal += (ord(char) - 33) * (91 ** (length - i))\n return decimal if text != '' else 0", - "docstring": "Takes a base91 char string and returns decimal" - }, - { - "code": "def populations_diff_coeff(particles, populations):\n D_counts = particles.diffusion_coeff_counts\n if len(D_counts) == 1:\n pop_sizes = [pop.stop - pop.start for pop in populations]\n assert D_counts[0][1] >= sum(pop_sizes)\n D_counts = [(D_counts[0][0], ps) for ps in pop_sizes]\n D_list = []\n D_pop_start = 0\n for pop, (D, counts) in zip(populations, D_counts):\n D_list.append(D)\n assert pop.start >= D_pop_start\n assert pop.stop <= D_pop_start + counts\n D_pop_start += counts\n return D_list", - "docstring": "Diffusion coefficients of the two specified populations." 
- }, - { - "code": "def _getStreamDef(self, modelDescription):\n aggregationPeriod = {\n 'days': 0,\n 'hours': 0,\n 'microseconds': 0,\n 'milliseconds': 0,\n 'minutes': 0,\n 'months': 0,\n 'seconds': 0,\n 'weeks': 0,\n 'years': 0,\n }\n aggFunctionsDict = {}\n if 'aggregation' in modelDescription['streamDef']:\n for key in aggregationPeriod.keys():\n if key in modelDescription['streamDef']['aggregation']:\n aggregationPeriod[key] = modelDescription['streamDef']['aggregation'][key]\n if 'fields' in modelDescription['streamDef']['aggregation']:\n for (fieldName, func) in modelDescription['streamDef']['aggregation']['fields']:\n aggFunctionsDict[fieldName] = str(func)\n hasAggregation = False\n for v in aggregationPeriod.values():\n if v != 0:\n hasAggregation = True\n break\n aggFunctionList = aggFunctionsDict.items()\n aggregationInfo = dict(aggregationPeriod)\n aggregationInfo['fields'] = aggFunctionList\n streamDef = copy.deepcopy(modelDescription['streamDef'])\n streamDef['aggregation'] = copy.deepcopy(aggregationInfo)\n return streamDef", - "docstring": "Generate stream definition based on" - }, - { - "code": "def buffers_exist(self):\n for buff in self.buffers:\n if not buff.is_separate_file:\n continue\n path = self.path.parent / buff.uri\n if not os.path.exists(path):\n raise FileNotFoundError(\"Buffer {} referenced in {} not found\".format(path, self.path))", - "docstring": "Checks if the bin files referenced exist" - }, - { - "code": "def _chunks(self, iterable, chunk_size):\n iterator = iter(iterable)\n chunk = list(itertools.islice(iterator, 0, chunk_size))\n while chunk:\n yield chunk\n chunk = list(itertools.islice(iterator, 0, chunk_size))", - "docstring": "Chunks data into chunk with size<=chunk_size." - }, - { - "code": "def sub_to_pix(self):\n return mapper_util.voronoi_sub_to_pix_from_grids_and_geometry(sub_grid=self.grid_stack.sub,\n regular_to_nearest_pix=self.grid_stack.pix.regular_to_nearest_pix,\n sub_to_regular=self.grid_stack.sub.sub_to_regular, pixel_centres=self.geometry.pixel_centres,\n pixel_neighbors=self.geometry.pixel_neighbors,\n pixel_neighbors_size=self.geometry.pixel_neighbors_size).astype('int')", - "docstring": "The 1D index mappings between the sub pixels and Voronoi pixelization pixels." - }, - { - "code": "def add(self, num_particles, D):\n self._plist += self._generate(num_particles, D, box=self.box,\n rs=self.rs)", - "docstring": "Add particles with diffusion coefficient `D` at random positions." - }, - { - "code": "def _invert(h):\n \"Cheap function to invert a hash.\"\n i = {}\n for k,v in h.items():\n i[v] = k\n return i", - "docstring": "Cheap function to invert a hash." - }, - { - "code": "def _restore_cache(gallery):\n cachePath = os.path.join(gallery.settings[\"destination\"], \".exif_cache\")\n try:\n if os.path.exists(cachePath):\n with open(cachePath, \"rb\") as cacheFile:\n gallery.exifCache = pickle.load(cacheFile)\n logger.debug(\"Loaded cache with %d entries\", len(gallery.exifCache))\n else:\n gallery.exifCache = {}\n except Exception as e:\n logger.warn(\"Could not load cache: %s\", e)\n gallery.exifCache = {}", - "docstring": "Restores the exif data cache from the cache file" - }, - { - "code": "def compress(self, filename):\n compressed_filename = self.get_compressed_filename(filename)\n if not compressed_filename:\n return\n self.do_compress(filename, compressed_filename)", - "docstring": "Compress a file, only if needed." 
- }, - { - "code": "def isfile(path):\n try:\n st = os.stat(path)\n except os.error:\n return False\n return stat.S_ISREG(st.st_mode)", - "docstring": "Test whether a path is a regular file" - }, - { - "code": "def _get_zoom(zoom, input_raster, pyramid_type):\n if not zoom:\n minzoom = 1\n maxzoom = get_best_zoom_level(input_raster, pyramid_type)\n elif len(zoom) == 1:\n minzoom = zoom[0]\n maxzoom = zoom[0]\n elif len(zoom) == 2:\n if zoom[0] < zoom[1]:\n minzoom = zoom[0]\n maxzoom = zoom[1]\n else:\n minzoom = zoom[1]\n maxzoom = zoom[0]\n return minzoom, maxzoom", - "docstring": "Determine minimum and maximum zoomlevel." - }, - { - "code": "def picknthweekday(year, month, dayofweek, hour, minute, whichweek):\n first = datetime.datetime(year, month, 1, hour, minute)\n weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1)\n wd = weekdayone + ((whichweek - 1) * ONEWEEK)\n if (wd.month != month):\n wd -= ONEWEEK\n return wd", - "docstring": "dayofweek == 0 means Sunday, whichweek 5 means last instance" - }, - { - "code": "def joint_torques(self):\n return as_flat_array(getattr(j, 'amotor', j).feedback[-1][:j.ADOF]\n for j in self.joints)", - "docstring": "Get a list of all current joint torques in the skeleton." - }, - { - "code": "def _set_name(self, name):\n if self.own.get('name'):\n self.func_name = name\n self.own['name']['value'] = Js(name)", - "docstring": "name is py type" - }, - { - "code": "def delete_enterprise_learner_role_assignment(sender, instance, **kwargs):\n if instance.user:\n enterprise_learner_role, __ = SystemWideEnterpriseRole.objects.get_or_create(name=ENTERPRISE_LEARNER_ROLE)\n try:\n SystemWideEnterpriseUserRoleAssignment.objects.get(\n user=instance.user,\n role=enterprise_learner_role\n ).delete()\n except SystemWideEnterpriseUserRoleAssignment.DoesNotExist:\n pass", - "docstring": "Delete the associated enterprise learner role assignment record when deleting an EnterpriseCustomerUser record." - }, - { - "code": "def conflicted_vars(self, current):\n \"Return a list of variables in current assignment that are in conflict\"\n return [var for var in self.vars\n if self.nconflicts(var, current[var], current) > 0]", - "docstring": "Return a list of variables in current assignment that are in conflict" - }, - { - "code": "def sample_colormap(cmap_name, n_samples):\n colors = []\n colormap = cm.cmap_d[cmap_name]\n for i in np.linspace(0, 1, n_samples):\n colors.append(colormap(i))\n return colors", - "docstring": "Sample a colormap from matplotlib" - }, - { - "code": "def delete_roles_request(request):\n uuid_ = request.matchdict['uuid']\n posted_roles = request.json\n with db_connect() as db_conn:\n with db_conn.cursor() as cursor:\n remove_role_requests(cursor, uuid_, posted_roles)\n resp = request.response\n resp.status_int = 200\n return resp", - "docstring": "Submission to remove a role acceptance request." - }, - { - "code": "def _json(self, response, status_code):\n if isinstance(status_code, numbers.Integral):\n status_code = (status_code,)\n if response.status_code in status_code:\n return response.json()\n else:\n raise RuntimeError(\"Response has status \"\n \"code {} not {}\".format(response.status_code,\n status_code))", - "docstring": "Extract JSON from response if `status_code` matches." 
- }, - { - "code": "def availableTags(self):\n return [\n GithubComponentVersion(\n '', t[0], t[1], self.name, cache_key=_createCacheKey('tag', t[0], t[1], self.name)\n ) for t in self._getTags()\n ]", - "docstring": "return a list of GithubComponentVersion objects for all tags" - }, - { - "code": "def _create_boundary(message):\n if not message.is_multipart() or message.get_boundary() is not None:\n return message\n from future.backports.email.generator import Generator\n boundary = Generator._make_boundary(message.policy.linesep)\n message.set_param('boundary', boundary)\n return message", - "docstring": "Add boundary parameter to multipart message if they are not present." - }, - { - "code": "def open(name=None, fileobj=None, closefd=True):\n return Guesser().open(name=name, fileobj=fileobj, closefd=closefd)", - "docstring": "Use all decompressor possible to make the stream" - }, - { - "code": "def setsweeps(self):\n for sweep in range(self.sweeps):\n self.setsweep(sweep)\n yield self.sweep", - "docstring": "iterate over every sweep" - }, - { - "code": "def templates(self, timeout=None):\n return self._api_request(\n self.TEMPLATES_ENDPOINT,\n self.HTTP_GET,\n timeout=timeout\n )", - "docstring": "API call to get a list of templates" - }, - { - "code": "def listdir(path):\n try:\n cached_mtime, list = cache[path]\n del cache[path]\n except KeyError:\n cached_mtime, list = -1, []\n mtime = os.stat(path).st_mtime\n if mtime != cached_mtime:\n list = os.listdir(path)\n list.sort()\n cache[path] = mtime, list\n return list", - "docstring": "List directory contents, using cache." - }, - { - "code": "def check_mro(self, bases):\n try:\n self.add_node(\"temp\")\n for base in bases:\n nx.DiGraph.add_edge(self, base, \"temp\")\n result = self.get_mro(\"temp\")[1:]\n finally:\n self.remove_node(\"temp\")\n return result", - "docstring": "Check if C3 MRO is possible with given bases" - }, - { - "code": "def transform_path(self, path):\r\n p = path.__class__()\n for pt in path:\r\n if pt.cmd == \"close\":\r\n p.closepath()\r\n elif pt.cmd == \"moveto\":\r\n p.moveto(*self.apply(pt.x, pt.y))\r\n elif pt.cmd == \"lineto\":\r\n p.lineto(*self.apply(pt.x, pt.y))\r\n elif pt.cmd == \"curveto\":\r\n vx1, vy1 = self.apply(pt.ctrl1.x, pt.ctrl1.y)\r\n vx2, vy2 = self.apply(pt.ctrl2.x, pt.ctrl2.y)\r\n x, y = self.apply(pt.x, pt.y)\r\n p.curveto(vx1, vy1, vx2, vy2, x, y)\r\n return p", - "docstring": "Returns a BezierPath object with the transformation applied." - }, - { - "code": "async def set_conversation_notification_level(\n self, set_conversation_notification_level_request\n ):\n response = hangouts_pb2.SetConversationNotificationLevelResponse()\n await self._pb_request(\n 'conversations/setconversationnotificationlevel',\n set_conversation_notification_level_request, response\n )\n return response", - "docstring": "Set the notification level of a conversation." 
- }, - { - "code": "def _renamer(self, tre):\n names = tre.get_leaves()\n for name in names:\n name.name = self.samples[int(name.name)]\n return tre.write(format=9)", - "docstring": "renames newick from numbers to sample names" - }, - { - "code": "def _transmit_update(self, channel_metadata_item_map, transmission_map):\n for chunk in chunks(channel_metadata_item_map, self.enterprise_configuration.transmission_chunk_size):\n serialized_chunk = self._serialize_items(list(chunk.values()))\n try:\n self.client.update_content_metadata(serialized_chunk)\n except ClientError as exc:\n LOGGER.error(\n 'Failed to update [%s] content metadata items for integrated channel [%s] [%s]',\n len(chunk),\n self.enterprise_configuration.enterprise_customer.name,\n self.enterprise_configuration.channel_code,\n )\n LOGGER.error(exc)\n else:\n self._update_transmissions(chunk, transmission_map)", - "docstring": "Transmit content metadata update to integrated channel." - }, - { - "code": "def extract_hook_names(ent):\n hnames = []\n for hook in ent[\"hooks\"][\"enter\"] + ent[\"hooks\"][\"exit\"]:\n hname = os.path.basename(hook[\"fpath_orig\"])\n hname = os.path.splitext(hname)[0]\n hname = hname.strip()\n hname = hname.replace(\"_enter\", \"\")\n hname = hname.replace(\"_exit\", \"\")\n if hname in hnames:\n continue\n hnames.append(hname)\n hnames.sort()\n return hnames", - "docstring": "Extract hook names from the given entity" - }, - { - "code": "def list(self, filters, cursor, count):\n assert isinstance(filters, dict), \"expected filters type 'dict'\"\n assert isinstance(cursor, dict), \"expected cursor type 'dict'\"\n query = self.get_query()\n assert isinstance(query, peewee.Query)\n paginator = self.get_paginator()\n assert isinstance(paginator, Pagination)\n count += 1\n pquery = paginator.filter_query(query, cursor, count)\n items = [ item for item in pquery ]\n next_item = items.pop(1)\n next_cursor = next_item.to_cursor_ref()\n return items, next_cursor", - "docstring": "List items from query" - }, - { - "code": "def flush(self):\n queue = self.queue\n size = queue.qsize()\n queue.join()\n self.log.debug('successfully flushed %s items.', size)", - "docstring": "Forces a flush from the internal queue to the server" - }, - { - "code": "def track_enrollment(pathway, user_id, course_run_id, url_path=None):\n track_event(user_id, 'edx.bi.user.enterprise.onboarding', {\n 'pathway': pathway,\n 'url_path': url_path,\n 'course_run_id': course_run_id,\n })", - "docstring": "Emit a track event for enterprise course enrollment." - }, - { - "code": "def sixteen_oscillator_two_stimulated_ensembles_grid():\r\n \"Not accurate false due to spikes are observed\"\r\n parameters = legion_parameters();\r\n parameters.teta_x = -1.1;\r\n template_dynamic_legion(16, 2000, 1500, conn_type = conn_type.GRID_FOUR, params = parameters, stimulus = [1, 1, 1, 0, \r\n 1, 1, 1, 0, \r\n 0, 0, 0, 1, \r\n 0, 0, 1, 1]);", - "docstring": "Not accurate false due to spikes are observed" - }, - { - "code": "def assert_that(val, description=''):\n global _soft_ctx\n if _soft_ctx:\n return AssertionBuilder(val, description, 'soft')\n return AssertionBuilder(val, description)", - "docstring": "Factory method for the assertion builder with value to be tested and optional description." 
- }, - { - "code": "def load_widgets(context, **kwargs):\n _soft = kwargs.pop('_soft', False)\n try:\n widgets = context.render_context[WIDGET_CONTEXT_KEY]\n except KeyError:\n widgets = context.render_context[WIDGET_CONTEXT_KEY] = {}\n for alias, template_name in kwargs.items():\n if _soft and alias in widgets:\n continue\n with context.render_context.push({BLOCK_CONTEXT_KEY: BlockContext()}):\n blocks = resolve_blocks(template_name, context)\n widgets[alias] = blocks\n return ''", - "docstring": "Load a series of widget libraries." - }, - { - "code": "def magbin_varind_gridsearch_worker(task):\n simbasedir, gridpoint, magbinmedian = task\n try:\n res = get_recovered_variables_for_magbin(simbasedir,\n magbinmedian,\n stetson_stdev_min=gridpoint[0],\n inveta_stdev_min=gridpoint[1],\n iqr_stdev_min=gridpoint[2],\n statsonly=True)\n return res\n except Exception as e:\n LOGEXCEPTION('failed to get info for %s' % gridpoint)\n return None", - "docstring": "This is a parallel grid search worker for the function below." - }, - { - "code": "def measure_memory(cls, obj, seen=None):\n size = sys.getsizeof(obj)\n if seen is None:\n seen = set()\n obj_id = id(obj)\n if obj_id in seen:\n return 0\n seen.add(obj_id)\n if isinstance(obj, dict):\n size += sum([cls.measure_memory(v, seen) for v in obj.values()])\n size += sum([cls.measure_memory(k, seen) for k in obj.keys()])\n elif hasattr(obj, '__dict__'):\n size += cls.measure_memory(obj.__dict__, seen)\n elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes, bytearray)):\n size += sum([cls.measure_memory(i, seen) for i in obj])\n return size", - "docstring": "Recursively finds size of objects" - }, - { - "code": "def json(self):\n if not self._is_constructed() or self._is_require_reconstruction():\n self.constructTx()\n return dict(self)", - "docstring": "Show the transaction as plain json" - }, - { - "code": "def log_if(level, msg, condition, *args):\n if condition:\n vlog(level, msg, *args)", - "docstring": "Log 'msg % args' at level 'level' only if condition is fulfilled." - }, - { - "code": "def as_dict(self):\n self_as_dict = {'chrom': self.chrom,\n 'start': self.start,\n 'ref_allele': self.ref_allele,\n 'alt_alleles': self.alt_alleles,\n 'alleles': [x.as_dict() for x in self.alleles]}\n try:\n self_as_dict['info'] = self.info\n except AttributeError:\n pass\n return self_as_dict", - "docstring": "Dict representation of parsed VCF data" - }, - { - "code": "def store_args(method):\n argspec = inspect.getfullargspec(method)\n defaults = {}\n if argspec.defaults is not None:\n defaults = dict(\n zip(argspec.args[-len(argspec.defaults):], argspec.defaults))\n if argspec.kwonlydefaults is not None:\n defaults.update(argspec.kwonlydefaults)\n arg_names = argspec.args[1:]\n @functools.wraps(method)\n def wrapper(*positional_args, **keyword_args):\n self = positional_args[0]\n args = defaults.copy()\n for name, value in zip(arg_names, positional_args[1:]):\n args[name] = value\n args.update(keyword_args)\n self.__dict__.update(args)\n return method(*positional_args, **keyword_args)\n return wrapper", - "docstring": "Stores provided method args as instance attributes." - }, - { - "code": "def _keywords(self):\n meta = self.find(\"meta\", {\"name\":\"keywords\"})\n if isinstance(meta, dict) and \\\n meta.has_key(\"content\"):\n keywords = [k.strip() for k in meta[\"content\"].split(\",\")]\n else:\n keywords = []\n return keywords", - "docstring": "Returns the meta keywords in the page." 
- }, - { - "code": "def missing_some(data, min_required, args):\n if min_required < 1:\n return []\n found = 0\n not_found = object()\n ret = []\n for arg in args:\n if get_var(data, arg, not_found) is not_found:\n ret.append(arg)\n else:\n found += 1\n if found >= min_required:\n return []\n return ret", - "docstring": "Implements the missing_some operator for finding missing variables." - }, - { - "code": "def hangup_call(self, call_params):\n path = '/' + self.api_version + '/HangupCall/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Hangup Live Call Helper" - }, - { - "code": "def _normalize_metadata(metadata):\n\tmetadata = str(metadata)\n\tmetadata = metadata.lower()\n\tmetadata = re.sub(r'\\/\\s*\\d+', '', metadata)\n\tmetadata = re.sub(r'^0+([0-9]+)', r'\\1', metadata)\n\tmetadata = re.sub(r'^\\d+\\.+', '', metadata)\n\tmetadata = re.sub(r'[^\\w\\s]', '', metadata)\n\tmetadata = re.sub(r'\\s+', ' ', metadata)\n\tmetadata = re.sub(r'^\\s+', '', metadata)\n\tmetadata = re.sub(r'\\s+$', '', metadata)\n\tmetadata = re.sub(r'^the\\s+', '', metadata, re.I)\n\treturn metadata", - "docstring": "Normalize metadata to improve match accuracy." - }, - { - "code": "def payload_class_for_element_name(element_name):\n logger.debug(\" looking up payload class for element: {0!r}\".format(\n element_name))\n logger.debug(\" known: {0!r}\".format(STANZA_PAYLOAD_CLASSES))\n if element_name in STANZA_PAYLOAD_CLASSES:\n return STANZA_PAYLOAD_CLASSES[element_name]\n else:\n return XMLPayload", - "docstring": "Return a payload class for given element name." - }, - { - "code": "def deserialize(cls, dict_model):\n kwargs = {}\n for f in cls._meta.concrete_fields:\n if f.attname in dict_model:\n kwargs[f.attname] = dict_model[f.attname]\n return cls(**kwargs)", - "docstring": "Returns an unsaved class object based on the valid properties passed in." - }, - { - "code": "def join_dicts(*dicts):\n out_dict = {}\n for d in dicts:\n for k, v in d.iteritems():\n if not type(v) in JOINERS:\n raise KeyError('Invalid type in dict: {}'.format(type(v)))\n JOINERS[type(v)](out_dict, k, v)\n return out_dict", - "docstring": "Join a bunch of dicts" - }, - { - "code": "def _delete_resource(self, url):\n headers = {\"Content-Type\": \"application/json\",\n \"Accept\": \"application/json\"}\n if self.token:\n headers[\"W-Token\"] = \"%s\" % self.token\n response = WhenIWork_DAO().deleteURL(url, headers)\n if not (response.status == 200 or response.status == 201 or\n response.status == 204):\n raise DataFailureException(url, response.status, response.data)\n return json.loads(response.data)", - "docstring": "When I Work DELETE method." 
- }, - { - "code": "def add_runtime(function):\n def wrapper(*args,**kwargs): \n pr=cProfile.Profile()\n pr.enable()\n output = function(*args,**kwargs)\n pr.disable()\n return pr,output\n return wrapper", - "docstring": "Decorator that adds a runtime profile object to the output" - }, - { - "code": "def filesByCell(fnames,cells):\n byCell={}\n fnames=smartSort(fnames)\n days = list(set([elem[:5] for elem in fnames if elem.endswith(\".abf\")]))\n for day in smartSort(days):\n parent=None\n for i,fname in enumerate([elem for elem in fnames if elem.startswith(day) and elem.endswith(\".abf\")]):\n ID=os.path.splitext(fname)[0]\n if len([x for x in fnames if x.startswith(ID)])-1:\n parent=ID\n if not parent in byCell:\n byCell[parent]=[]\n byCell[parent]=byCell[parent]+[fname]\n return byCell", - "docstring": "given files and cells, return a dict of files grouped by cell." - }, - { - "code": "def create_widget(self):\n self.init_options()\n MapFragment.newInstance(self.options).then(\n self.on_map_fragment_created)\n self.widget = FrameLayout(self.get_context())\n self.map = GoogleMap(__id__=bridge.generate_id())", - "docstring": "Create the underlying widget." - }, - { - "code": "def drawCircle(self, x0, y0, r, color=None):\n md.draw_circle(self.set, x0, y0, r, color)", - "docstring": "Draw a circle in an RGB color, with center x0, y0 and radius r." - }, - { - "code": "def analyzeAll(self):\n searchableData=str(self.files2)\n self.log.debug(\"considering analysis for %d ABFs\",len(self.IDs))\n for ID in self.IDs:\n if not ID+\"_\" in searchableData:\n self.log.debug(\"%s needs analysis\",ID)\n try:\n self.analyzeABF(ID)\n except:\n print(\"EXCEPTION! \"*100)\n else:\n self.log.debug(\"%s has existing analysis, not overwriting\",ID)\n self.log.debug(\"verified analysis of %d ABFs\",len(self.IDs))", - "docstring": "analyze every unanalyzed ABF in the folder." - }, - { - "code": "def restore_python(self):\n orig = self.orig_settings\n sys.setrecursionlimit(orig[\"sys.recursionlimit\"])\n if \"sys.tracebacklimit\" in orig:\n sys.tracebacklimit = orig[\"sys.tracebacklimit\"]\n else:\n if hasattr(sys, \"tracebacklimit\"):\n del sys.tracebacklimit\n if \"showwarning\" in orig:\n warnings.showwarning = orig[\"showwarning\"]\n orig.clear()\n threading.stack_size()", - "docstring": "Restore Python settings to the original states" - }, - { - "code": "def handle_upload(self, request):\n if request.method != 'POST':\n raise Http404\n if request.is_ajax():\n try:\n filename = request.GET['quillUploadFile']\n data = request\n is_raw = True\n except KeyError:\n return HttpResponseBadRequest(\"Invalid file upload.\")\n else:\n if len(request.FILES) != 1:\n return HttpResponseBadRequest(\"Can only upload 1 file at a time.\")\n try:\n data = request.FILES['quillUploadFile']\n filename = data.name\n is_raw = False\n except KeyError:\n return HttpResponseBadRequest('Missing image `quillUploadFile`.')\n url = save_file(data, filename, is_raw, default_storage)\n response_data = {}\n response_data['url'] = url\n return HttpResponse(json.dumps(response_data), content_type=\"text/html; charset=utf-8\")", - "docstring": "Handle file uploads from WYSIWYG." 
- }, - { - "code": "def render_to(self, path, template, **data):\n html = self.render(template, **data)\n with open(path, 'w') as f:\n f.write(html.encode(charset))", - "docstring": "Render data with template and then write to path" - }, - { - "code": "def write(self, msg):\n if self.redirect is not None:\n self.redirect.write(msg)\n if six.PY2:\n from xdoctest.utils.util_str import ensure_unicode\n msg = ensure_unicode(msg)\n super(TeeStringIO, self).write(msg)", - "docstring": "Write to this and the redirected stream" - }, - { - "code": "def add_nations_field(authors_subfields):\n from .config import NATIONS_DEFAULT_MAP\n result = []\n for field in authors_subfields:\n if field[0] == 'v':\n values = [x.replace('.', '') for x in field[1].split(', ')]\n possible_affs = filter(lambda x: x is not None,\n map(NATIONS_DEFAULT_MAP.get, values))\n if 'CERN' in possible_affs and 'Switzerland' in possible_affs:\n possible_affs = [x for x in possible_affs\n if x != 'Switzerland']\n result.extend(possible_affs)\n result = sorted(list(set(result)))\n if result:\n authors_subfields.extend([('w', res) for res in result])\n else:\n authors_subfields.append(('w', 'HUMAN CHECK'))", - "docstring": "Add correct nations field according to mapping in NATIONS_DEFAULT_MAP." - }, - { - "code": "def add_namespace_to_graph(self, graph: BELGraph) -> Namespace:\n namespace = self.upload_bel_namespace()\n graph.namespace_url[namespace.keyword] = namespace.url\n self._add_annotation_to_graph(graph)\n return namespace", - "docstring": "Add this manager's namespace to the graph." - }, - { - "code": "async def get_self_info(self, get_self_info_request):\n response = hangouts_pb2.GetSelfInfoResponse()\n await self._pb_request('contacts/getselfinfo',\n get_self_info_request, response)\n return response", - "docstring": "Return info about the current user." - }, - { - "code": "def ensure_format(doc, format):\n assert format in ('xml', 'json')\n if getattr(doc, 'tag', None) == 'open511':\n if format == 'json':\n return xml_to_json(doc)\n elif isinstance(doc, dict) and 'meta' in doc:\n if format == 'xml':\n return json_doc_to_xml(doc)\n else:\n raise ValueError(\"Unrecognized input document\")\n return doc", - "docstring": "Ensures that the provided document is an lxml Element or json dict." - }, - { - "code": "def info(self):\n res = self.redis.execute_command('FT.INFO', self.index_name)\n it = six.moves.map(to_string, res)\n return dict(six.moves.zip(it, it))", - "docstring": "Get info an stats about the the current index, including the number of documents, memory consumption, etc" - }, - { - "code": "def add(self, requester: int, track: dict):\r\n self.queue.append(AudioTrack().build(track, requester))", - "docstring": "Adds a track to the queue." - }, - { - "code": "def max_item(self):\n if self.is_empty():\n raise ValueError(\"Tree is empty\")\n node = self._root\n while node.right is not None:\n node = node.right\n return node.key, node.value", - "docstring": "Get item with max key of tree, raises ValueError if tree is empty." - }, - { - "code": "def isSprintCompleted(self, sprintIdx):\n numExistingSprints = len(self._state['sprints'])\n if sprintIdx >= numExistingSprints:\n return False\n return (self._state['sprints'][sprintIdx]['status'] == 'completed')", - "docstring": "Return True if the given sprint has completed." 
- }, - { - "code": "def load(self):\n if not os.path.exists(self.path):\n return\n with open(self.path, 'r') as f:\n env_data = yaml.load(f.read())\n if env_data:\n for env in env_data:\n self.add(VirtualEnvironment(env['root']))", - "docstring": "Load the environment cache from disk." - }, - { - "code": "def current_human_transaction(self):\n try:\n tx, _, _, _, _ = self._callstack[0]\n if tx.result is not None:\n return None\n assert tx.depth == 0\n return tx\n except IndexError:\n return None", - "docstring": "Current ongoing human transaction" - }, - { - "code": "def filter_dict(unfiltered, filter_keys):\n filtered = DotDict()\n for k in filter_keys:\n filtered[k] = unfiltered[k]\n return filtered", - "docstring": "Return a subset of a dictionary using the specified keys." - }, - { - "code": "def update_time(sender, **kwargs):\n comment = kwargs['instance']\n if comment.content_type.app_label == \"happenings\" and comment.content_type.name == \"Update\":\n from .models import Update\n item = Update.objects.get(id=comment.object_pk)\n item.save()", - "docstring": "When a Comment is added, updates the Update to set \"last_updated\" time" - }, - { - "code": "def create(_):\n endpoint = client_endpoint()\n if not endpoint:\n raise CLIError(\"Connection endpoint not found. \"\n \"Before running sfctl commands, connect to a cluster using \"\n \"the 'sfctl cluster select' command.\")\n no_verify = no_verify_setting()\n if security_type() == 'aad':\n auth = AdalAuthentication(no_verify)\n else:\n cert = cert_info()\n ca_cert = ca_cert_info()\n auth = ClientCertAuthentication(cert, ca_cert, no_verify)\n return ServiceFabricClientAPIs(auth, base_url=endpoint)", - "docstring": "Create a client for Service Fabric APIs." - }, - { - "code": "def df_quantile(df, nb=100):\n quantiles = np.linspace(0, 1., nb)\n res = pd.DataFrame()\n for q in quantiles:\n res = res.append(df.quantile(q), ignore_index=True)\n return res", - "docstring": "Returns the nb quantiles for datas in a dataframe" - }, - { - "code": "def next(self):\n queue = self.queue\n items = []\n item = self.next_item()\n if item is None:\n return items\n items.append(item)\n while len(items) < self.upload_size and not queue.empty():\n item = self.next_item()\n if item:\n items.append(item)\n return items", - "docstring": "Return the next batch of items to upload." - }, - { - "code": "def add_texture_dir(self, directory):\n dirs = list(self.TEXTURE_DIRS)\n dirs.append(directory)\n self.TEXTURE_DIRS = dirs", - "docstring": "Hack in texture directory" - }, - { - "code": "def instruction_in_grid(self, instruction):\n row_position = self._rows_in_grid[instruction.row].xy\n x = instruction.index_of_first_consumed_mesh_in_row\n position = Point(row_position.x + x, row_position.y)\n return InstructionInGrid(instruction, position)", - "docstring": "Returns an `InstructionInGrid` object for the `instruction`" - }, - { - "code": "def pid(self):\n pid = self.deposit_fetcher(self.id, self)\n return PersistentIdentifier.get(pid.pid_type,\n pid.pid_value)", - "docstring": "Return an instance of deposit PID." 
- }, - { - "code": "def main(argv=None):\n if argv is None:\n argv = sys.argv[1:]\n if not argv or argv[0] == \"help\":\n show_help()\n return 0\n elif argv[0] == \"check\":\n return check_main(argv[1:])\n elif argv[0] == \"list\":\n return list_main(argv[1:])\n elif argv[0] == \"write\":\n return write_main(argv[1:])\n else:\n print(u\"Don't understand {!r}\".format(\" \".join(argv)))\n show_help()\n return 1", - "docstring": "The edx_lint command entry point." - }, - { - "code": "def find_repos(self, depth=10):\n repos = []\n for root, subdirs, files in walk_dn(self.root, depth=depth):\n if 'modules' in root:\n continue\n if '.git' in subdirs:\n repos.append(root)\n return repos", - "docstring": "Get all git repositories within this environment" - }, - { - "code": "def system(cmd, data=None):\n import subprocess\n s = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE)\n out, err = s.communicate(data)\n return out.decode('utf8')", - "docstring": "pipes the output of a program" - }, - { - "code": "def deleteByPk(self, pk):\n\t\tobj = self.mdl.objects.getOnlyIndexedFields(pk)\n\t\tif not obj:\n\t\t\treturn 0\n\t\treturn self.deleteOne(obj)", - "docstring": "deleteByPk - Delete object associated with given primary key" - }, - { - "code": "def hierarchy(annotation, **kwargs):\n htimes, hlabels = hierarchy_flatten(annotation)\n htimes = [np.asarray(_) for _ in htimes]\n return mir_eval.display.hierarchy(htimes, hlabels, **kwargs)", - "docstring": "Plotting wrapper for hierarchical segmentations" - }, - { - "code": "def show(self):\n self.parent.addLayout(self._logSelectLayout)\n self.menuCount += 1\n self._connectSlots()", - "docstring": "Display menus and connect even signals." - }, - { - "code": "def write(self, bytes_):\n string = bytes_.decode(self._encoding)\n self._file.write(string)", - "docstring": "Write bytes to the file." - }, - { - "code": "def disk(self):\n r = self.local_renderer\n r.run(r.env.disk_usage_command)", - "docstring": "Display percent of disk usage." - }, - { - "code": "def mget(self, keys, *args):\n args = list_or_args(keys, args)\n server_keys = {}\n ret_dict = {}\n for key in args:\n server_name = self.get_server_name(key)\n server_keys[server_name] = server_keys.get(server_name, [])\n server_keys[server_name].append(key)\n for server_name, sub_keys in iteritems(server_keys):\n values = self.connections[server_name].mget(sub_keys)\n ret_dict.update(dict(zip(sub_keys, values)))\n result = []\n for key in args:\n result.append(ret_dict.get(key, None))\n return result", - "docstring": "Returns a list of values ordered identically to ``keys``" - }, - { - "code": "def can_run_from_cli(self):\n ret = False\n ast_tree = ast.parse(self.body, self.path)\n calls = self._find_calls(ast_tree, __name__, \"exit\")\n for call in calls:\n if re.search(\"{}\\(\".format(re.escape(call)), self.body):\n ret = True\n break\n return ret", - "docstring": "return True if this script can be run from the command line" - }, - { - "code": "def year(past=False, min_delta=0, max_delta=20):\n return dt.date.today().year + _delta(past, min_delta, max_delta)", - "docstring": "Return a random year." 
- }, - { - "code": "async def _sync(self):\n logger.info('Syncing events since {}'.format(self._sync_timestamp))\n try:\n res = await self._client.sync_all_new_events(\n hangouts_pb2.SyncAllNewEventsRequest(\n request_header=self._client.get_request_header(),\n last_sync_timestamp=parsers.to_timestamp(\n self._sync_timestamp\n ),\n max_response_size_bytes=1048576,\n )\n )\n except exceptions.NetworkError as e:\n logger.warning('Failed to sync events, some events may be lost: {}'\n .format(e))\n else:\n for conv_state in res.conversation_state:\n conv_id = conv_state.conversation_id.id\n conv = self._conv_dict.get(conv_id, None)\n if conv is not None:\n conv.update_conversation(conv_state.conversation)\n for event_ in conv_state.event:\n timestamp = parsers.from_timestamp(event_.timestamp)\n if timestamp > self._sync_timestamp:\n await self._on_event(event_)\n else:\n self._add_conversation(\n conv_state.conversation,\n conv_state.event,\n conv_state.event_continuation_token\n )", - "docstring": "Sync conversation state and events that could have been missed." - }, - { - "code": "def setup_paths(source, destination, name, add_to_global, force):\n if source[-1] == \"/\":\n source = source[:-1]\n if not name:\n name = os.path.split(source)[-1]\n elif name.endswith(\".docset\"):\n name = name.replace(\".docset\", \"\")\n if add_to_global:\n destination = DEFAULT_DOCSET_PATH\n dest = os.path.join(destination or \"\", name + \".docset\")\n dst_exists = os.path.lexists(dest)\n if dst_exists and force:\n shutil.rmtree(dest)\n elif dst_exists:\n log.error(\n 'Destination path \"{}\" already exists.'.format(\n click.format_filename(dest)\n )\n )\n raise SystemExit(errno.EEXIST)\n return source, dest, name", - "docstring": "Determine source and destination using the options." - }, - { - "code": "def subseq(self, start, end):\n return Fasta(self.id, self.seq[start:end])", - "docstring": "Returns Fasta object with the same name, of the bases from start to end, but not including end" - }, - { - "code": "def getaddrspec(self):\n aslist = []\n self.gotonext()\n while self.pos < len(self.field):\n if self.field[self.pos] == '.':\n aslist.append('.')\n self.pos += 1\n elif self.field[self.pos] == '\"':\n aslist.append('\"%s\"' % self.getquote())\n elif self.field[self.pos] in self.atomends:\n break\n else: aslist.append(self.getatom())\n self.gotonext()\n if self.pos >= len(self.field) or self.field[self.pos] != '@':\n return ''.join(aslist)\n aslist.append('@')\n self.pos += 1\n self.gotonext()\n return ''.join(aslist) + self.getdomain()", - "docstring": "Parse an RFC 2822 addr-spec." - }, - { - "code": "def remove_binaries():\r\n patterns = (\r\n \"adslib/*.a\",\r\n \"adslib/*.o\",\r\n \"adslib/obj/*.o\",\r\n \"adslib/*.bin\",\r\n \"adslib/*.so\",\r\n )\r\n for f in functools.reduce(operator.iconcat, [glob.glob(p) for p in patterns]):\r\n os.remove(f)", - "docstring": "Remove all binary files in the adslib directory." - }, - { - "code": "def check_example(self, example):\n \"Raise ValueError if example has any invalid values.\"\n if self.values:\n for a in self.attrs:\n if example[a] not in self.values[a]:\n raise ValueError('Bad value %s for attribute %s in %s' %\n (example[a], self.attrnames[a], example))", - "docstring": "Raise ValueError if example has any invalid values." - }, - { - "code": "def make_response(self, data=None):\n if data is not None:\n data = self.prepare(data)\n self.response.write(data, serialize=True)", - "docstring": "Fills the response object from the passed data." 
- }, - { - "code": "def update(self, key, value):\n if key not in self.value:\n self.value[key] = ReducedMetric(self.reducer)\n self.value[key].update(value)", - "docstring": "Updates a value of a given key and apply reduction" - }, - { - "code": "def extract_params(source):\n funcdef = find_funcdef(source)\n params = []\n for node in ast.walk(funcdef.args):\n if isinstance(node, ast.arg):\n if node.arg not in params:\n params.append(node.arg)\n return params", - "docstring": "Extract parameters from a function definition" - }, - { - "code": "def create_patch(self, from_tag, to_tag):\n return str(self._git.diff('{}..{}'.format(from_tag, to_tag), _tty_out=False))", - "docstring": "Create a patch between tags" - }, - { - "code": "def _sign_data(self, data, options):\n if options['signature_algorithm_id'] not in self.signature_algorithms:\n raise Exception('Unknown signature algorithm id: %d'\n % options['signature_algorithm_id'])\n signature_algorithm = \\\n self.signature_algorithms[options['signature_algorithm_id']]\n algorithm = self._get_algorithm_info(signature_algorithm)\n key_salt = get_random_bytes(algorithm['salt_size'])\n key = self._generate_key(options['signature_passphrase_id'],\n self.signature_passphrases, key_salt, algorithm)\n data = self._encode(data, algorithm, key)\n return data + key_salt", - "docstring": "Add signature to data" - }, - { - "code": "def _process_current(self, handle, op, dest_path=None, dest_name=None):\n unrarlib.RARProcessFileW(handle, op, dest_path, dest_name)", - "docstring": "Process current member with 'op' operation." - }, - { - "code": "def phrase_to_filename(self, phrase):\n name = re.sub(r\"[^\\w\\s\\.]\", '', phrase.strip().lower())\n name = re.sub(r\"\\s+\", '_', name)\n return name + '.png'", - "docstring": "Convert phrase to normilized file name." - }, - { - "code": "def get(self):\n fields = [c.get() for c in self.comps]\n return self.field_reduce_func(fields)", - "docstring": "Combine the fields from all components" - }, - { - "code": "def output(self):\n output_params = dict(\n self._raw[\"output\"],\n grid=self.output_pyramid.grid,\n pixelbuffer=self.output_pyramid.pixelbuffer,\n metatiling=self.output_pyramid.metatiling\n )\n if \"path\" in output_params:\n output_params.update(\n path=absolute_path(path=output_params[\"path\"], base_dir=self.config_dir)\n )\n if \"format\" not in output_params:\n raise MapcheteConfigError(\"output format not specified\")\n if output_params[\"format\"] not in available_output_formats():\n raise MapcheteConfigError(\n \"format %s not available in %s\" % (\n output_params[\"format\"], str(available_output_formats())\n )\n )\n writer = load_output_writer(output_params)\n try:\n writer.is_valid_with_config(output_params)\n except Exception as e:\n logger.exception(e)\n raise MapcheteConfigError(\n \"driver %s not compatible with configuration: %s\" % (\n writer.METADATA[\"driver_name\"], e\n )\n )\n return writer", - "docstring": "Output object of driver." - }, - { - "code": "def map_function(self, func, *arg_lists):\n return GridStack(*[func(*args) for args in zip(self, *arg_lists)])", - "docstring": "Map a function to all grid_stack in a grid-stack" - }, - { - "code": "def parse(self, data):\n try:\n return self.yacc.parse(data, lexer=self.lex)\n except:\n return None", - "docstring": "Parses a license list and returns a License or None if it failed." 
- }, - { - "code": "def const_equal(str_a, str_b):\n if len(str_a) != len(str_b):\n return False\n result = True\n for i in range(len(str_a)):\n result &= (str_a[i] == str_b[i])\n return result", - "docstring": "Constant time string comparison" - }, - { - "code": "def _activate_organization_course_relationship(relationship):\n relationship = internal.OrganizationCourse.objects.get(\n id=relationship.id,\n active=False,\n organization__active=True\n )\n _activate_record(relationship)", - "docstring": "Activates an inactive organization-course relationship" - }, - { - "code": "def readtxt(filepath):\n with open(filepath, 'rt') as f:\n lines = f.readlines()\n return ''.join(lines)", - "docstring": "read file as is" - }, - { - "code": "def next(self):\n self.index += 1\n t = self.peek()\n if not self.depth:\n self._cut()\n return t", - "docstring": "Advances to and returns the next token or returns EndOfFile" - }, - { - "code": "def euclidean(c1, c2):\n diffs = ((i - j) for i, j in zip(c1, c2))\n return sum(x * x for x in diffs)", - "docstring": "Square of the euclidean distance" - }, - { - "code": "def score(self, plaintext):\n \"Return a score for text based on how common letters pairs are.\"\n s = 1.0\n for bi in bigrams(plaintext):\n s = s * self.P2[bi]\n return s", - "docstring": "Return a score for text based on how common letters pairs are." - }, - { - "code": "def rUpdate(original, updates):\n dictPairs = [(original, updates)]\n while len(dictPairs) > 0:\n original, updates = dictPairs.pop()\n for k, v in updates.iteritems():\n if k in original and isinstance(original[k], dict) and isinstance(v, dict):\n dictPairs.append((original[k], v))\n else:\n original[k] = v", - "docstring": "Recursively updates the values in original with the values from updates." - }, - { - "code": "def plot_nodes(self, nodelist, theta, group):\n for i, node in enumerate(nodelist):\n r = self.internal_radius + i * self.scale\n x, y = get_cartesian(r, theta)\n circle = plt.Circle(xy=(x, y), radius=self.dot_radius,\n color=self.node_colormap[group], linewidth=0)\n self.ax.add_patch(circle)", - "docstring": "Plots nodes to screen." - }, - { - "code": "def create_module(name, code=None):\n if name not in sys.modules:\n sys.modules[name] = imp.new_module(name)\n module = sys.modules[name]\n if code:\n print('executing code for %s: %s' % (name, code))\n exec(code in module.__dict__)\n exec(\"from %s import %s\" % (name, '*'))\n return module", - "docstring": "Dynamically creates a module with the given name." - }, - { - "code": "def search_news(q, start=0, wait=10, asynchronous=False, cached=False):\n service = GOOGLE_NEWS\n return GoogleSearch(q, start, service, \"\", wait, asynchronous, cached)", - "docstring": "Returns a Google news query formatted as a GoogleSearch list object." - }, - { - "code": "def emit(self, what, *args):\n if isinstance(what, basestring):\n return self.exe.emit(what, *args)\n elif isinstance(what, list):\n self._emit_statement_list(what)\n else:\n return getattr(self, what['type'])(**what)", - "docstring": "what can be either name of the op, or node, or a list of statements." 
- }, - { - "code": "def str_cmd(cmd, cwd, env):\n process = subprocess.Popen(cmd, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, cwd=cwd, env=env)\n stdout_builder, stderr_builder = proc.async_stdout_stderr_builder(process)\n process.wait()\n stdout, stderr = stdout_builder.result(), stderr_builder.result()\n return {'command': ' '.join(cmd), 'stderr': stderr, 'stdout': stdout}", - "docstring": "Runs the command and returns its stdout and stderr." - }, - { - "code": "def update(self, server):\n return server.put(\n 'task_admin',\n self.as_payload(),\n replacements={\n 'slug': self.__challenge__.slug,\n 'identifier': self.identifier})", - "docstring": "Update existing task on the server" - }, - { - "code": "def create(self, *args, **kwargs):\n is_primary = kwargs.pop(\"is_primary\", False)\n with transaction.atomic():\n email = super(EmailAddressManager, self).create(*args, **kwargs)\n if is_primary:\n email.set_primary()\n return email", - "docstring": "Create a new email address." - }, - { - "code": "def parse_definite_clause(s):\n \"Return the antecedents and the consequent of a definite clause.\"\n assert is_definite_clause(s)\n if is_symbol(s.op):\n return [], s\n else:\n antecedent, consequent = s.args\n return conjuncts(antecedent), consequent", - "docstring": "Return the antecedents and the consequent of a definite clause." - }, - { - "code": "def dictFlat(l):\n if type(l) is dict:\n return [l]\n if \"numpy\" in str(type(l)):\n return l\n dicts=[]\n for item in l:\n if type(item)==dict:\n dicts.append(item)\n elif type(item)==list:\n for item2 in item:\n dicts.append(item2)\n return dicts", - "docstring": "Given a list of list of dicts, return just the dicts." - }, - { - "code": "def index(self, i, length=None):\n if self.begin <= i <= self.end:\n index = i - self.BEGIN - self.offset\n if length is None:\n length = self.full_range()\n else:\n length = min(length, self.full_range())\n if 0 <= index < length:\n return index", - "docstring": "Return an integer index or None" - }, - { - "code": "def requests():\n page = request.args.get('page', 1, type=int)\n per_page = request.args.get('per_page', 5, type=int)\n memberships = Membership.query_requests(current_user, eager=True).all()\n return render_template(\n 'invenio_groups/pending.html',\n memberships=memberships,\n requests=True,\n page=page,\n per_page=per_page,\n )", - "docstring": "List all pending memberships, listed only for group admins." - }, - { - "code": "def pointwise_product(self, other, bn):\n \"Multiply two factors, combining their variables.\"\n vars = list(set(self.vars) | set(other.vars))\n cpt = dict((event_values(e, vars), self.p(e) * other.p(e))\n for e in all_events(vars, bn, {}))\n return Factor(vars, cpt)", - "docstring": "Multiply two factors, combining their variables." 
- }, - { - "code": "def file_print(self, filename):\n log.info('Printing ' + filename)\n res = self.__exchange(PRINT_FILE.format(filename=filename))\n log.info(res)\n return res", - "docstring": "Prints a file on the device to console" - }, - { - "code": "def fill_boot(seqarr, newboot, newmap, spans, loci):\n cidx = 0\n for i in xrange(loci.shape[0]):\n x1 = spans[loci[i]][0]\n x2 = spans[loci[i]][1]\n cols = seqarr[:, x1:x2]\n cord = np.random.choice(cols.shape[1], cols.shape[1], replace=False)\n rcols = cols[:, cord]\n newboot[:, cidx:cidx+cols.shape[1]] = rcols\n newmap[cidx: cidx+cols.shape[1], 0] = i+1\n cidx += cols.shape[1]\n return newboot, newmap", - "docstring": "fills the new bootstrap resampled array" - }, - { - "code": "def event(self, event):\n logger.debug(u\"TCP transport event: {0}\".format(event))\n if self._stream:\n event.stream = self._stream\n self._event_queue.put(event)", - "docstring": "Pass an event to the target stream or just log it." - }, - { - "code": "def new_address(self, sender=None, nonce=None):\n if sender is not None and nonce is None:\n nonce = self.get_nonce(sender)\n new_address = self.calculate_new_address(sender, nonce)\n if sender is None and new_address in self:\n return self.new_address(sender, nonce)\n return new_address", - "docstring": "Create a fresh 160bit address" - }, - { - "code": "def install(self, package):\n logger.debug('Installing ' + package)\n shell.run(self.pip_path, 'install', package)", - "docstring": "Install a python package using pip" - }, - { - "code": "def remove_decorator(source: str):\n lines = source.splitlines()\n atok = asttokens.ASTTokens(source, parse=True)\n for node in ast.walk(atok.tree):\n if isinstance(node, ast.FunctionDef):\n break\n if node.decorator_list:\n deco_first = node.decorator_list[0]\n deco_last = node.decorator_list[-1]\n line_first = atok.tokens[deco_first.first_token.index - 1].start[0]\n line_last = atok.tokens[deco_last.last_token.index + 1].start[0]\n lines = lines[:line_first - 1] + lines[line_last:]\n return \"\\n\".join(lines) + \"\\n\"", - "docstring": "Remove decorators from function definition" - }, - { - "code": "def decorator(decorator_func):\n assert callable(decorator_func), type(decorator_func)\n def _decorator(func=None, **kwargs):\n assert func is None or callable(func), type(func)\n if func:\n return decorator_func(func, **kwargs)\n else:\n def _decorator_helper(func):\n return decorator_func(func, **kwargs)\n return _decorator_helper\n return _decorator", - "docstring": "Allows a decorator to be called with or without keyword arguments." - }, - { - "code": "def add_model_file(self, model_fpath, position=1, file_id=None):\n if file_id is None:\n file_id = self.make_unique_id('file_input')\n ret_data = self.file_create(File.from_file(model_fpath, position,\n file_id))\n return ret_data", - "docstring": "Add a kappa model from a file at given path to the project." - }, - { - "code": "def _rc_renamenx(self, src, dst):\n \"Rename key ``src`` to ``dst`` if ``dst`` doesn't already exist\"\n if self.exists(dst):\n return False\n return self._rc_rename(src, dst)", - "docstring": "Rename key ``src`` to ``dst`` if ``dst`` doesn't already exist" - }, - { - "code": "def n_p(self):\n return 2*_sltr.GeV2joule(self.E)*_spc.epsilon_0 / (self.beta*_spc.elementary_charge)**2", - "docstring": "The plasma density in SI units." 
- }, - { - "code": "def format_to_csv(filename, skiprows=0, delimiter=\"\"):\n if not delimiter:\n delimiter = \"\\t\"\n input_file = open(filename, \"r\")\n if skiprows:\n [input_file.readline() for _ in range(skiprows)]\n new_filename = os.path.splitext(filename)[0] + \".csv\"\n output_file = open(new_filename, \"w\")\n header = input_file.readline().split()\n reader = csv.DictReader(input_file, fieldnames=header, delimiter=delimiter)\n writer = csv.DictWriter(output_file, fieldnames=header, delimiter=\",\")\n writer.writerow(dict((x, x) for x in header))\n for line in reader:\n if None in line: del line[None]\n writer.writerow(line)\n input_file.close()\n output_file.close()\n print \"Saved %s.\" % new_filename", - "docstring": "Convert a file to a .csv file" - }, - { - "code": "def url_from_path(path):\n if os.sep != '/':\n path = '/'.join(path.split(os.sep))\n return quote(path)", - "docstring": "Transform path to url, converting backslashes to slashes if needed." - }, - { - "code": "def aligned_indel_filter(clust, max_internal_indels):\n lclust = clust.split()\n try:\n seq1 = [i.split(\"nnnn\")[0] for i in lclust[1::2]]\n seq2 = [i.split(\"nnnn\")[1] for i in lclust[1::2]]\n intindels1 = [i.rstrip(\"-\").lstrip(\"-\").count(\"-\") for i in seq1]\n intindels2 = [i.rstrip(\"-\").lstrip(\"-\").count(\"-\") for i in seq2]\n intindels = intindels1 + intindels2\n if max(intindels) > max_internal_indels:\n return 1\n except IndexError:\n seq1 = lclust[1::2]\n intindels = [i.rstrip(\"-\").lstrip(\"-\").count(\"-\") for i in seq1]\n if max(intindels) > max_internal_indels:\n return 1 \n return 0", - "docstring": "checks for too many internal indels in muscle aligned clusters" - }, - { - "code": "def _on_event(self, _):\n self.sort(key=lambda conv_button: conv_button.last_modified,\n reverse=True)", - "docstring": "Re-order the conversations when an event occurs." - }, - { - "code": "def credentials(self):\n ctx = _app_ctx_stack.top\n if not hasattr(ctx, _CREDENTIALS_KEY):\n ctx.google_oauth2_credentials = self.storage.get()\n return ctx.google_oauth2_credentials", - "docstring": "The credentials for the current user or None if unavailable." 
- }, - { - "code": "def dstat(inarr, taxdict, mindict=1, nboots=1000, name=0):\n if isinstance(inarr, list):\n arr, _ = _loci_to_arr(inarr, taxdict, mindict)\n if arr.shape[1] == 4:\n res, boots = _get_signif_4(arr, nboots)\n res = pd.DataFrame(res, \n columns=[name],\n index=[\"Dstat\", \"bootmean\", \"bootstd\", \"Z\", \"ABBA\", \"BABA\", \"nloci\"])\n else:\n res, boots = _get_signif_5(arr, nboots)\n res = pd.DataFrame(res,\n index=[\"p3\", \"p4\", \"shared\"], \n columns=[\"Dstat\", \"bootmean\", \"bootstd\", \"Z\", \"ABxxA\", \"BAxxA\", \"nloci\"]\n )\n return res.T, boots", - "docstring": "private function to perform a single D-stat test" - }, - { - "code": "def bruteforce(users, domain, password, host):\n cs = CredentialSearch(use_pipe=False)\n print_notification(\"Connecting to {}\".format(host))\n s = Server(host)\n c = Connection(s)\n for user in users:\n if c.rebind(user=\"{}\\\\{}\".format(domain, user.username), password=password, authentication=NTLM):\n print_success('Success for: {}:{}'.format(user.username, password))\n credential = cs.find_object(\n user.username, password, domain=domain, host_ip=host)\n if not credential:\n credential = Credential(username=user.username, secret=password,\n domain=domain, host_ip=host, type=\"plaintext\", port=389)\n credential.add_tag(tag)\n credential.save()\n user.add_tag(tag)\n user.save()\n else:\n print_error(\"Fail for: {}:{}\".format(user.username, password))", - "docstring": "Performs a bruteforce for the given users, password, domain on the given host." - }, - { - "code": "def merge_ordered(ordereds: typing.Iterable[typing.Any]) -> typing.Iterable[typing.Any]:\n seen_set = set()\n add_seen = seen_set.add\n return reversed(tuple(map(\n lambda obj: add_seen(obj) or obj,\n filterfalse(\n seen_set.__contains__,\n chain.from_iterable(map(reversed, reversed(ordereds))),\n ),\n )))", - "docstring": "Merge multiple ordered so that within-ordered order is preserved" - }, - { - "code": "def _set_pixel_and_convert_color(self, x, y, color):\n if color is None:\n return\n color = self._convert_color_to_rrggbb(color)\n self._set_pixel(x, y, color)", - "docstring": "set the pixel but convert the color before." 
- }, - { - "code": "def fromutc(self, dt):\n if not isinstance(dt, datetime):\n raise TypeError(\"fromutc() requires a datetime argument\")\n if dt.tzinfo is not self:\n raise ValueError(\"dt.tzinfo is not self\")\n transitions = self.transitions(dt.year)\n if transitions is None:\n return dt + self.utcoffset(dt)\n dston, dstoff = transitions\n dston -= self._std_offset\n dstoff -= self._std_offset\n utc_transitions = (dston, dstoff)\n dt_utc = dt.replace(tzinfo=None)\n isdst = self._naive_isdst(dt_utc, utc_transitions)\n if isdst:\n dt_wall = dt + self._dst_offset\n else:\n dt_wall = dt + self._std_offset\n _fold = int(not isdst and self.is_ambiguous(dt_wall))\n return enfold(dt_wall, fold=_fold)", - "docstring": "Given a datetime in UTC, return local time" - }, - { - "code": "def emulate(self, instruction):\n while True:\n self._should_try_again = False\n self._to_raise = None\n self._step(instruction)\n if not self._should_try_again:\n break", - "docstring": "Wrapper that runs the _step function in a loop while handling exceptions" - }, - { - "code": "def as_DAVError(e):\n if isinstance(e, DAVError):\n return e\n elif isinstance(e, Exception):\n return DAVError(HTTP_INTERNAL_ERROR, src_exception=e)\n else:\n return DAVError(HTTP_INTERNAL_ERROR, \"{}\".format(e))", - "docstring": "Convert any non-DAVError exception to HTTP_INTERNAL_ERROR." - }, - { - "code": "def objHasUnsavedChanges(self):\n\t\tif not self.obj:\n\t\t\treturn False\n\t\treturn self.obj.hasUnsavedChanges(cascadeObjects=True)", - "docstring": "objHasUnsavedChanges - Check if any object has unsaved changes, cascading." - }, - { - "code": "def delta(self):\n v = float(self._logistic.value)\n if v > 0.0:\n v = 1 / (1 + exp(-v))\n else:\n v = exp(v)\n v = v / (v + 1.0)\n return min(max(v, epsilon.tiny), 1 - epsilon.tiny)", - "docstring": "Variance ratio between ``K`` and ``I``." - }, - { - "code": "def _process_event(self, key, mask):\n self._logger.debug('Processing event with key {} and mask {}'.format(key, mask))\n fileobj, (reader, writer) = key.fileobj, key.data\n if mask & selectors.EVENT_READ and reader is not None:\n if reader._cancelled:\n self.remove_reader(fileobj)\n else:\n self._logger.debug('Invoking reader callback: {}'.format(reader))\n reader._run()\n if mask & selectors.EVENT_WRITE and writer is not None:\n if writer._cancelled:\n self.remove_writer(fileobj)\n else:\n self._logger.debug('Invoking writer callback: {}'.format(writer))\n writer._run()", - "docstring": "Selector has delivered us an event." 
- }, - { - "code": "def random_product(iter1, iter2):\n pool1 = tuple(iter1)\n pool2 = tuple(iter2)\n ind1 = random.sample(pool1, 2)\n ind2 = random.sample(pool2, 2)\n return tuple(ind1+ind2)", - "docstring": "random sampler for equal_splits func" - }, - { - "code": "def publications(self):\n if self.library_type != \"users\":\n raise ze.CallDoesNotExist(\n \"This API call does not exist for group libraries\"\n )\n query_string = \"/{t}/{u}/publications/items\"\n return self._build_query(query_string)", - "docstring": "Return the contents of My Publications" - }, - { - "code": "def outer_left_join(self, join_streamlet, window_config, join_function):\n from heronpy.streamlet.impl.joinbolt import JoinStreamlet, JoinBolt\n join_streamlet_result = JoinStreamlet(JoinBolt.OUTER_LEFT, window_config,\n join_function, self, join_streamlet)\n self._add_child(join_streamlet_result)\n join_streamlet._add_child(join_streamlet_result)\n return join_streamlet_result", - "docstring": "Return a new Streamlet by left join_streamlet with this streamlet" - }, - { - "code": "def _getActions(self):\n actions = _a11y.AXUIElement._getActions(self)\n return [action[2:] for action in actions]", - "docstring": "Retrieve a list of actions supported by the object." - }, - { - "code": "def exchange_token(self, code):\n access_token_url = OAUTH_ROOT + '/access_token'\n params = {\n 'client_id': self.client_id,\n 'client_secret': self.client_secret,\n 'redirect_uri': self.redirect_uri,\n 'code': code,\n }\n resp = requests.get(access_token_url, params=params)\n if not resp.ok:\n raise MixcloudOauthError(\"Could not get access token.\")\n return resp.json()['access_token']", - "docstring": "Exchange the authorization code for an access token." - }, - { - "code": "def calc_J(self):\n del self.J\n self.J = np.zeros([self.param_vals.size, self.data.size])\n dp = np.zeros_like(self.param_vals)\n f0 = self.model.copy()\n for a in range(self.param_vals.size):\n dp *= 0\n dp[a] = self.dl[a]\n f1 = self.func(self.param_vals + dp, *self.func_args, **self.func_kwargs)\n grad_func = (f1 - f0) / dp[a]\n self.J[a] = -grad_func", - "docstring": "Updates self.J, returns nothing" - }, - { - "code": "def config(ctx, key, value, remove, edit):\n conf = ctx.obj[\"conf\"]\n if not edit and not key:\n raise click.BadArgumentUsage(\"You have to specify either a key or use --edit.\")\n if edit:\n return click.edit(filename=conf.config_file)\n if remove:\n try:\n conf.cfg.remove_option(key[0], key[1])\n except Exception as e:\n logger.debug(e)\n else:\n conf.write_config()\n return\n if not value:\n try:\n click.echo(conf.cfg.get(key[0], key[1]))\n except Exception as e:\n logger.debug(e)\n return\n if not conf.cfg.has_section(key[0]):\n conf.cfg.add_section(key[0])\n conf.cfg.set(key[0], key[1], value)\n conf.write_config()", - "docstring": "Get or set config item." - }, - { - "code": "async def _remove(self, ctx, index: int):\r\n player = self.bot.lavalink.players.get(ctx.guild.id)\r\n if not player.queue:\r\n return await ctx.send('Nothing queued.')\r\n if index > len(player.queue) or index < 1:\r\n return await ctx.send(f'Index has to be **between** 1 and {len(player.queue)}')\r\n index -= 1\r\n removed = player.queue.pop(index)\r\n await ctx.send(f'Removed **{removed.title}** from the queue.')", - "docstring": "Removes an item from the player's queue with the given index." 
- }, - { - "code": "def p_file_comment(self, f_term, predicate):\n try:\n for _, _, comment in self.graph.triples((f_term, predicate, None)):\n self.builder.set_file_comment(self.doc, six.text_type(comment))\n except CardinalityError:\n self.more_than_one_error('file comment')", - "docstring": "Sets file comment text." - }, - { - "code": "def predictor(self, (i, j, A, alpha, Bb)):\n \"Add to chart any rules for B that could help extend this edge.\"\n B = Bb[0]\n if B in self.grammar.rules:\n for rhs in self.grammar.rewrites_for(B):\n self.add_edge([j, j, B, [], rhs])", - "docstring": "Add to chart any rules for B that could help extend this edge." - }, - { - "code": "def animated_gif_to_colorlists(image, container=list):\n deprecated.deprecated('util.gif.animated_gif_to_colorlists')\n from PIL import ImageSequence\n it = ImageSequence.Iterator(image)\n return [image_to_colorlist(i, container) for i in it]", - "docstring": "Given an animated GIF, return a list with a colorlist for each frame." - }, - { - "code": "def colorpalette(self, colorpalette):\n if isinstance(colorpalette, str):\n colorpalette = colors.parse_colors(colorpalette)\n self._colorpalette = colors.sanitize_color_palette(colorpalette)", - "docstring": "Set the colorpalette which should be used" - }, - { - "code": "def update_notes(self):\n fields = record_get_field_instances(self.record, '500')\n for field in fields:\n subs = field_get_subfields(field)\n for sub in subs.get('a', []):\n sub = sub.strip()\n if sub.startswith(\"*\") and sub.endswith(\"*\"):\n record_delete_field(self.record, tag=\"500\",\n field_position_global=field[4])", - "docstring": "Remove INSPIRE specific notes." - }, - { - "code": "def index_collection(self, filenames):\n \"Index a whole collection of files.\"\n for filename in filenames:\n self.index_document(open(filename).read(), filename)", - "docstring": "Index a whole collection of files." - }, - { - "code": "def check_me(self):\n \"Check that my fields make sense.\"\n assert len(self.attrnames) == len(self.attrs)\n assert self.target in self.attrs\n assert self.target not in self.inputs\n assert set(self.inputs).issubset(set(self.attrs))\n map(self.check_example, self.examples)", - "docstring": "Check that my fields make sense." - }, - { - "code": "def _StructMessageToJsonObject(message, unused_including_default=False):\n fields = message.fields\n ret = {}\n for key in fields:\n ret[key] = _ValueMessageToJsonObject(fields[key])\n return ret", - "docstring": "Converts Struct message according to Proto3 JSON Specification." - }, - { - "code": "def build_swig():\n print(\"Looking for FANN libs...\")\n find_fann()\n print(\"running SWIG...\")\n swig_bin = find_swig()\n swig_cmd = [swig_bin, '-c++', '-python', 'fann2/fann2.i']\n subprocess.Popen(swig_cmd).wait()", - "docstring": "Run SWIG with specified parameters" - }, - { - "code": "def post(self, request, *args, **kwargs):\n enterprise_customer_uuid, course_run_id, course_key, program_uuid = RouterView.get_path_variables(**kwargs)\n enterprise_customer = get_enterprise_customer_or_404(enterprise_customer_uuid)\n if course_key:\n context_data = get_global_context(request, enterprise_customer)\n try:\n kwargs['course_id'] = RouterView.get_course_run_id(request.user, enterprise_customer, course_key)\n except Http404:\n error_code = 'ENTRV001'\n log_message = (\n 'Could not find course run with id {course_run_id} '\n 'for course key {course_key} and '\n 'for enterprise_customer_uuid {enterprise_customer_uuid} '\n 'and program {program_uuid}. 
'\n 'Returned error code {error_code} to user {userid}'.format(\n course_key=course_key,\n course_run_id=course_run_id,\n enterprise_customer_uuid=enterprise_customer_uuid,\n error_code=error_code,\n userid=request.user.id,\n program_uuid=program_uuid,\n )\n )\n return render_page_with_error_code_message(request, context_data, error_code, log_message)\n return self.redirect(request, *args, **kwargs)", - "docstring": "Run some custom POST logic for Enterprise workflows before routing the user through existing views." - }, - { - "code": "def abs_img(img):\n bool_img = np.abs(read_img(img).get_data())\n return bool_img.astype(int)", - "docstring": "Return an image with the binarised version of the data of `img`." - }, - { - "code": "def create_new_locale(\n self,\n template_id,\n locale,\n version_name,\n subject,\n text='',\n html='',\n timeout=None\n ):\n payload = {\n 'locale': locale,\n 'name': version_name,\n 'subject': subject\n }\n if html:\n payload['html'] = html\n if text:\n payload['text'] = text\n return self._api_request(\n self.TEMPLATES_LOCALES_ENDPOINT % template_id,\n self.HTTP_POST,\n payload=payload,\n timeout=timeout\n )", - "docstring": "API call to create a new locale and version of a template" - }, - { - "code": "def do_restart(self, line):\n self.bot._frame = 0\n self.bot._namespace.clear()\n self.bot._namespace.update(self.bot._initial_namespace)", - "docstring": "Attempt to restart the bot." - }, - { - "code": "def _retry_storage_check(exception):\n now = datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')\n print_error(\n '%s: Exception %s: %s' % (now, type(exception).__name__, str(exception)))\n return isinstance(exception, oauth2client.client.AccessTokenRefreshError)", - "docstring": "Return True if we should retry, False otherwise." - }, - { - "code": "def unset_logging(self):\n if self.logger_flag is True:\n return\n root_logger = logging.getLogger()\n for hndlr in root_logger.handlers:\n if hndlr not in self.prior_loghandlers:\n hndlr.setLevel(logging.ERROR)\n self.logger_flag = True", - "docstring": "Mute newly added handlers to the root level, right after calling executor.status" - }, - { - "code": "def repr_imgs(imgs):\n if isinstance(imgs, string_types):\n return imgs\n if isinstance(imgs, collections.Iterable):\n return '[{}]'.format(', '.join(repr_imgs(img) for img in imgs))\n try:\n filename = imgs.get_filename()\n if filename is not None:\n img_str = \"{}('{}')\".format(imgs.__class__.__name__, filename)\n else:\n img_str = \"{}(shape={}, affine={})\".format(imgs.__class__.__name__,\n repr(get_shape(imgs)),\n repr(imgs.get_affine()))\n except Exception as exc:\n log.error('Error reading attributes from img.get_filename()')\n return repr(imgs)\n else:\n return img_str", - "docstring": "Printing of img or imgs" - }, - { - "code": "def _oauth2_web_server_flow_params(kwargs):\n params = {\n 'access_type': 'offline',\n 'response_type': 'code',\n }\n params.update(kwargs)\n approval_prompt = params.get('approval_prompt')\n if approval_prompt is not None:\n logger.warning(\n 'The approval_prompt parameter for OAuth2WebServerFlow is '\n 'deprecated. Please use the prompt parameter instead.')\n if approval_prompt == 'force':\n logger.warning(\n 'approval_prompt=\"force\" has been adjusted to '\n 'prompt=\"consent\"')\n params['prompt'] = 'consent'\n del params['approval_prompt']\n return params", - "docstring": "Configures redirect URI parameters for OAuth2WebServerFlow." 
- }, - { - "code": "def from_spec(spec, kwargs=None):\n layer = util.get_object(\n obj=spec,\n predefined_objects=tensorforce.core.networks.layers,\n kwargs=kwargs\n )\n assert isinstance(layer, Layer)\n return layer", - "docstring": "Creates a layer from a specification dict." - }, - { - "code": "def cat_file(path):\n cmd = [\"cat\", path]\n status, stdout, _ = cij.ssh.command(cmd, shell=True, echo=True)\n if status:\n raise RuntimeError(\"cij.nvme.env: cat %s failed\" % path)\n return stdout.strip()", - "docstring": "Cat file and return content" - }, - { - "code": "def clear_obj(self, obj):\n removed = self.cellgraph.clear_obj(obj)\n for node in removed:\n del node[OBJ].data[node[KEY]]", - "docstring": "Clear values and nodes of `obj` and their dependants." - }, - { - "code": "def abfProtocol(fname):\n f=open(fname,'rb')\n raw=f.read(30*1000)\n f.close()\n raw=raw.decode(\"utf-8\",\"ignore\")\n raw=raw.split(\"Clampex\")[1].split(\".pro\")[0]\n protocol = os.path.basename(raw)\n protocolID = protocol.split(\" \")[0]\n return protocolID", - "docstring": "Determine the protocol used to record an ABF file" - }, - { - "code": "def where_cross(data,threshold):\n Is=np.where(data>threshold)[0]\n Is=np.concatenate(([0],Is))\n Ds=Is[:-1]-Is[1:]+1\n return Is[np.where(Ds)[0]+1]", - "docstring": "return a list of Is where the data first crosses above threshold." - }, - { - "code": "def _get_storage_service(credentials):\n if credentials is None:\n credentials = oauth2client.client.GoogleCredentials.get_application_default(\n )\n return discovery.build('storage', 'v1', credentials=credentials)", - "docstring": "Get a storage client using the provided credentials or defaults." - }, - { - "code": "def transform_description(self, content_metadata_item):\n description_with_locales = []\n for locale in self.enterprise_configuration.get_locales():\n description_with_locales.append({\n 'locale': locale,\n 'value': (\n content_metadata_item.get('full_description') or\n content_metadata_item.get('short_description') or\n content_metadata_item.get('title', '')\n )\n })\n return description_with_locales", - "docstring": "Return the description of the content item." 
- }, - { - "code": "def save(self):\n\t\tif len(self) == 0:\n\t\t\treturn []\n\t\tmdl = self.getModel()\n\t\treturn mdl.saver.save(self)", - "docstring": "save - Save all objects in this list" - }, - { - "code": "def on_message(self, websocket, message):\n waiter = self._waiter\n self._waiter = None\n encoded = json.loads(message)\n event = encoded.get('event')\n channel = encoded.get('channel')\n data = json.loads(encoded.get('data'))\n try:\n if event == PUSHER_ERROR:\n raise PusherError(data['message'], data['code'])\n elif event == PUSHER_CONNECTION:\n self.socket_id = data.get('socket_id')\n self.logger.info('Succesfully connected on socket %s',\n self.socket_id)\n waiter.set_result(self.socket_id)\n elif event == PUSHER_SUBSCRIBED:\n self.logger.info('Succesfully subscribed to %s',\n encoded.get('channel'))\n elif channel:\n self[channel]._event(event, data)\n except Exception as exc:\n if waiter:\n waiter.set_exception(exc)\n else:\n self.logger.exception('pusher error')", - "docstring": "Handle websocket incoming messages" - }, - { - "code": "async def send_offnetwork_invitation(\n self, send_offnetwork_invitation_request\n ):\n response = hangouts_pb2.SendOffnetworkInvitationResponse()\n await self._pb_request('devices/sendoffnetworkinvitation',\n send_offnetwork_invitation_request,\n response)\n return response", - "docstring": "Send an email to invite a non-Google contact to Hangouts." - }, - { - "code": "def print(self):\r\n print(\"---[ START {} ]---\".format(self.name))\r\n for i, line in enumerate(self.lines):\r\n print(\"{}: {}\".format(str(i).zfill(3), line))\r\n print(\"---[ END {} ]---\".format(self.name))", - "docstring": "Print the shader lines" - }, - { - "code": "def build_sdist(sdist_directory, config_settings):\n backend = _build_backend()\n try:\n return backend.build_sdist(sdist_directory, config_settings)\n except getattr(backend, 'UnsupportedOperation', _DummyException):\n raise GotUnsupportedOperation(traceback.format_exc())", - "docstring": "Invoke the mandatory build_sdist hook." 
- }, - { - "code": "def _wrap_color(self, code, text, format=None, style=None):\n color = None\n if code[:3] == self.bg.PREFIX:\n color = self.bg.COLORS.get(code, None)\n if not color:\n color = self.fg.COLORS.get(code, None)\n if not color:\n raise Exception('Color code not found')\n if format and format not in self.formats:\n raise Exception('Color format not found')\n fmt = \"0;\"\n if format == 'bold':\n fmt = \"1;\"\n elif format == 'underline':\n fmt = \"4;\"\n parts = color.split('[')\n color = '{0}[{1}{2}'.format(parts[0], fmt, parts[1])\n if self.has_colors and self.colors_enabled:\n st = ''\n if style:\n st = self.st.COLORS.get(style, '')\n return \"{0}{1}{2}{3}\".format(st, color, text, self.st.COLORS['reset_all'])\n else:\n return text", - "docstring": "Colors text with code and given format" - }, - { - "code": "def log_calls(function):\n def wrapper(self,*args,**kwargs): \n self.log.log(group=function.__name__,message='Enter') \n function(self,*args,**kwargs)\n self.log.log(group=function.__name__,message='Exit') \n return wrapper", - "docstring": "Decorator that logs function calls in their self.log" - }, - { - "code": "def tail(filename, n):\n size = os.path.getsize(filename)\n with open(filename, \"rb\") as f:\n fm = mmap.mmap(f.fileno(), 0, mmap.MAP_SHARED, mmap.PROT_READ)\n try:\n for i in xrange(size - 1, -1, -1):\n if fm[i] == '\\n':\n n -= 1\n if n == -1:\n break\n return fm[i + 1 if i else 0:].splitlines()\n finally:\n fm.close()", - "docstring": "Returns last n lines from the filename. No exception handling" - }, - { - "code": "def sort_by_name(infile, outfile):\n seqs = {}\n file_to_dict(infile, seqs)\n fout = utils.open_file_write(outfile)\n for name in sorted(seqs):\n print(seqs[name], file=fout)\n utils.close(fout)", - "docstring": "Sorts input sequence file by sort -d -k1,1, writes sorted output file." - }, - { - "code": "def _get_ckptmgr_process(self):\n ckptmgr_main_class = 'org.apache.heron.ckptmgr.CheckpointManager'\n ckptmgr_ram_mb = self.checkpoint_manager_ram / (1024 * 1024)\n ckptmgr_cmd = [os.path.join(self.heron_java_home, \"bin/java\"),\n '-Xms%dM' % ckptmgr_ram_mb,\n '-Xmx%dM' % ckptmgr_ram_mb,\n '-XX:+PrintCommandLineFlags',\n '-verbosegc',\n '-XX:+PrintGCDetails',\n '-XX:+PrintGCTimeStamps',\n '-XX:+PrintGCDateStamps',\n '-XX:+PrintGCCause',\n '-XX:+UseGCLogFileRotation',\n '-XX:NumberOfGCLogFiles=5',\n '-XX:GCLogFileSize=100M',\n '-XX:+PrintPromotionFailure',\n '-XX:+PrintTenuringDistribution',\n '-XX:+PrintHeapAtGC',\n '-XX:+HeapDumpOnOutOfMemoryError',\n '-XX:+UseConcMarkSweepGC',\n '-XX:+UseConcMarkSweepGC',\n '-Xloggc:log-files/gc.ckptmgr.log',\n '-Djava.net.preferIPv4Stack=true',\n '-cp',\n self.checkpoint_manager_classpath,\n ckptmgr_main_class,\n '-t' + self.topology_name,\n '-i' + self.topology_id,\n '-c' + self.ckptmgr_ids[self.shard],\n '-p' + self.checkpoint_manager_port,\n '-f' + self.stateful_config_file,\n '-o' + self.override_config_file,\n '-g' + self.heron_internals_config_file]\n retval = {}\n retval[self.ckptmgr_ids[self.shard]] = Command(ckptmgr_cmd, self.shell_env)\n return retval", - "docstring": "Get the command to start the checkpoint manager process" - }, - { - "code": "def _parse_image_uri(self, raw_uri):\n docker_uri = os.path.join(self._relative_path,\n raw_uri.replace('https://', 'https/', 1))\n return docker_uri", - "docstring": "Return a valid docker_path from a Google Persistent Disk url." 
- }, - { - "code": "def try_match(request_origin, maybe_regex):\n if isinstance(maybe_regex, RegexObject):\n return re.match(maybe_regex, request_origin)\n elif probably_regex(maybe_regex):\n return re.match(maybe_regex, request_origin, flags=re.IGNORECASE)\n else:\n try:\n return request_origin.lower() == maybe_regex.lower()\n except AttributeError:\n return request_origin == maybe_regex", - "docstring": "Safely attempts to match a pattern or string to a request origin." - }, - { - "code": "def expire_password(self, username):\n r = self.local_renderer\n r.env.username = username\n r.sudo('chage -d 0 {username}')", - "docstring": "Forces the user to change their password the next time they login." - }, - { - "code": "def create_tar(tar_filename, files, config_dir, config_files):\n with contextlib.closing(tarfile.open(tar_filename, 'w:gz', dereference=True)) as tar:\n for filename in files:\n if os.path.isfile(filename):\n tar.add(filename, arcname=os.path.basename(filename))\n else:\n raise Exception(\"%s is not an existing file\" % filename)\n if os.path.isdir(config_dir):\n tar.add(config_dir, arcname=get_heron_sandbox_conf_dir())\n else:\n raise Exception(\"%s is not an existing directory\" % config_dir)\n for filename in config_files:\n if os.path.isfile(filename):\n arcfile = os.path.join(get_heron_sandbox_conf_dir(), os.path.basename(filename))\n tar.add(filename, arcname=arcfile)\n else:\n raise Exception(\"%s is not an existing file\" % filename)", - "docstring": "Create a tar file with a given set of files" - }, - { - "code": "def _matrix2dict(matrix, etype=False):\n n = len(matrix)\n adj = {k: {} for k in range(n)}\n for k in range(n):\n for j in range(n):\n if matrix[k, j] != 0:\n adj[k][j] = {} if not etype else matrix[k, j]\n return adj", - "docstring": "Takes an adjacency matrix and returns an adjacency list." - }, - { - "code": "def find_matching(cls, path, patterns):\n for pattern in patterns:\n if pattern.match(path):\n yield pattern", - "docstring": "Yield all matching patterns for path." - }, - { - "code": "def _check_for_exceptions(self, resp, multiple_rates):\n if resp['rCode'] != 100:\n raise exceptions.get_exception_for_code(resp['rCode'])(resp)\n results = resp['results']\n if len(results) == 0:\n raise exceptions.ZipTaxNoResults('No results found')\n if len(results) > 1 and not multiple_rates:\n rates = [result['taxSales'] for result in results]\n if len(set(rates)) != 1:\n raise exceptions.ZipTaxMultipleResults('Multiple results found but requested only one')", - "docstring": "Check if there are exceptions that should be raised" - }, - { - "code": "def post(self, request, response):\n if self.slug is not None:\n raise http.exceptions.NotImplemented()\n self.assert_operations('create')\n data = self._clean(None, self.request.read(deserialize=True))\n item = self.create(data)\n self.response.status = http.client.CREATED\n self.make_response(item)", - "docstring": "Processes a `POST` request." 
- }, - { - "code": "def _get_version(self):\n if \"version\" in self.document.attrib:\n value = self.document.attrib[\"version\"].lower()\n if value in allowed_versions[self.params['service']]:\n self.params[\"version\"] = value\n else:\n raise OWSInvalidParameterValue(\"Version %s is not supported\" % value, value=\"version\")\n elif self._get_request_type() == \"getcapabilities\":\n self.params[\"version\"] = None\n else:\n raise OWSMissingParameterValue('Parameter \"version\" is missing', value=\"version\")\n return self.params[\"version\"]", - "docstring": "Find requested version in POST request." - }, - { - "code": "def course_key_is_valid(course_key):\n if course_key is None:\n return False\n try:\n CourseKey.from_string(text_type(course_key))\n except (InvalidKeyError, UnicodeDecodeError):\n return False\n return True", - "docstring": "Course key object validation" - }, - { - "code": "def save(self, filename, imdata, **data):\n if isinstance(imdata, numpy.ndarray):\n imdata = Image.fromarray(numpy.uint8(imdata))\n elif isinstance(imdata, Image.Image):\n imdata.save(self._savepath(filename))", - "docstring": "Data may be either a PIL Image object or a Numpy array." - }, - { - "code": "def expandvars_dict(settings):\n return dict(\n (key, os.path.expandvars(value))\n for key, value in settings.iteritems()\n )", - "docstring": "Expands all environment variables in a settings dictionary." - }, - { - "code": "def hash_id(iso_timestamp, msg):\n return '{0}-{1}'.format(iso_timestamp,\n hashlib.sha1(\n msg.get('unique_id').encode('utf-8') +\n str(msg.get('visitor_id')).\n encode('utf-8')).\n hexdigest())", - "docstring": "Generate event id, optimized for ES." - }, - { - "code": "def cluster(node):\n cluster_client = PolyaxonClient().cluster\n if node:\n try:\n node_config = cluster_client.get_node(node)\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not load node `{}` info.'.format(node))\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n get_node_info(node_config)\n else:\n try:\n cluster_config = cluster_client.get_cluster()\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not load cluster info.')\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n get_cluster_info(cluster_config)", - "docstring": "Get cluster and nodes info." 
- }, - { - "code": "def award_project_bid(session, bid_id):\n headers = {\n 'Content-Type': 'application/x-www-form-urlencoded'\n }\n bid_data = {\n 'action': 'award'\n }\n endpoint = 'bids/{}'.format(bid_id)\n response = make_put_request(session, endpoint, headers=headers,\n params_data=bid_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['status']\n else:\n json_data = response.json()\n raise BidNotAwardedException(\n message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id']\n )", - "docstring": "Award a bid on a project" - }, - { - "code": "def function_call(type_spec, *args):\n m = re.match(r\"(?P[a-zA-Z_][a-zA-Z_0-9]*)(?P\\(.*\\))\", type_spec)\n if not m:\n raise EthereumError(\"Function signature expected\")\n ABI._check_and_warn_num_args(type_spec, *args)\n result = ABI.function_selector(type_spec)\n result += ABI.serialize(m.group('type'), *args)\n return result", - "docstring": "Build transaction data from function signature and arguments" - }, - { - "code": "def repack(self):\n items = self.grouped_filter().order_by('rank').select_for_update()\n for count, item in enumerate(items):\n item.rank = count + 1\n item.save(rerank=False)", - "docstring": "Removes any blank ranks in the order." - }, - { - "code": "def available_drivers():\n global __modules\n global __available\n if type(__modules) is not list:\n __modules = list(__modules)\n if not __available:\n __available = [d.ahioDriverInfo.NAME\n for d in __modules\n if d.ahioDriverInfo.AVAILABLE]\n return __available", - "docstring": "Returns a list of available drivers names." - }, - { - "code": "def byte_number_string(\n number, thousandsSep=True, partition=False, base1024=True, appendBytes=True\n):\n magsuffix = \"\"\n bytesuffix = \"\"\n if partition:\n magnitude = 0\n if base1024:\n while number >= 1024:\n magnitude += 1\n number = number >> 10\n else:\n while number >= 1000:\n magnitude += 1\n number /= 1000.0\n magsuffix = [\"\", \"K\", \"M\", \"G\", \"T\", \"P\"][magnitude]\n if appendBytes:\n if number == 1:\n bytesuffix = \" Byte\"\n else:\n bytesuffix = \" Bytes\"\n if thousandsSep and (number >= 1000 or magsuffix):\n snum = \"{:,d}\".format(number)\n else:\n snum = str(number)\n return \"{}{}{}\".format(snum, magsuffix, bytesuffix)", - "docstring": "Convert bytes into human-readable representation." - }, - { - "code": "def _AddPropertiesForExtensions(descriptor, cls):\n extension_dict = descriptor.extensions_by_name\n for extension_name, extension_field in extension_dict.items():\n constant_name = extension_name.upper() + \"_FIELD_NUMBER\"\n setattr(cls, constant_name, extension_field.number)", - "docstring": "Adds properties for all fields in this protocol message type." - }, - { - "code": "def new(n, prefix=None):\n if isinstance(n, Leaf):\n return Leaf(n.type, n.value, prefix=n.prefix if prefix is None else prefix)\n n.parent = None\n if prefix is not None:\n n.prefix = prefix\n return n", - "docstring": "lib2to3's AST requires unique objects as children." - }, - { - "code": "def field_xml_output(field, tag):\n marcxml = []\n if field[3]:\n marcxml.append(' %s' %\n (tag, MathMLParser.html_to_text(field[3])))\n else:\n marcxml.append(' ' %\n (tag, field[1], field[2]))\n marcxml += [_subfield_xml_output(subfield) for subfield in field[0]]\n marcxml.append(' ')\n return '\\n'.join(marcxml)", - "docstring": "Generate the XML for field 'field' and returns it as a string." 
- }, - { - "code": "def json_rpc_format(self):\n error = {\n 'name': text_type(self.__class__.__name__),\n 'code': self.code,\n 'message': '{0}'.format(text_type(self.message)),\n 'data': self.data\n }\n if current_app.config['DEBUG']:\n import sys, traceback\n error['stack'] = traceback.format_exc()\n error['executable'] = sys.executable\n return error", - "docstring": "Return the Exception data in a format for JSON-RPC" - }, - { - "code": "def convert_markdown(message):\n assert message['Content-Type'].startswith(\"text/markdown\")\n del message['Content-Type']\n message = make_message_multipart(message)\n for payload_item in set(message.get_payload()):\n if payload_item['Content-Type'].startswith('text/plain'):\n original_text = payload_item.get_payload()\n html_text = markdown.markdown(original_text)\n html_payload = future.backports.email.mime.text.MIMEText(\n \"{}\".format(html_text),\n \"html\",\n )\n message.attach(html_payload)\n return message", - "docstring": "Convert markdown in message text to HTML." - }, - { - "code": "def getdomain(self):\n sdlist = []\n while self.pos < len(self.field):\n if self.field[self.pos] in self.LWS:\n self.pos += 1\n elif self.field[self.pos] == '(':\n self.commentlist.append(self.getcomment())\n elif self.field[self.pos] == '[':\n sdlist.append(self.getdomainliteral())\n elif self.field[self.pos] == '.':\n self.pos += 1\n sdlist.append('.')\n elif self.field[self.pos] in self.atomends:\n break\n else: sdlist.append(self.getatom())\n return ''.join(sdlist)", - "docstring": "Get the complete domain name from an address." - }, - { - "code": "def _task_sort_function(task):\n return (task.get_field('create-time'), int(task.get_field('task-id', 0)),\n int(task.get_field('task-attempt', 0)))", - "docstring": "Return a tuple for sorting 'most recent first'." - }, - { - "code": "def _show_menu(self):\n current_widget = self._tabbed_window.get_current_widget()\n if hasattr(current_widget, 'get_menu_widget'):\n menu_widget = current_widget.get_menu_widget(self._hide_menu)\n overlay = urwid.Overlay(menu_widget, self._tabbed_window,\n align='center', width=('relative', 80),\n valign='middle', height=('relative', 80))\n self._urwid_loop.widget = overlay", - "docstring": "Show the overlay menu." - }, - { - "code": "def play(self, call_params):\n path = '/' + self.api_version + '/Play/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Play something on a Call Helper" - }, - { - "code": "def data_two_freqs(N=200):\n nn = arange(N)\n xx = cos(0.257*pi*nn) + sin(0.2*pi*nn) + 0.01*randn(nn.size)\n return xx", - "docstring": "A simple test example with two close frequencies" - }, - { - "code": "async def receive_json(self, content, **kwargs):\n if isinstance(content, dict) and \"stream\" in content and \"payload\" in content:\n steam_name = content[\"stream\"]\n payload = content[\"payload\"]\n if steam_name not in self.applications_accepting_frames:\n raise ValueError(\"Invalid multiplexed frame received (stream not mapped)\")\n await self.send_upstream(\n message={\n \"type\": \"websocket.receive\",\n \"text\": await self.encode_json(payload)\n },\n stream_name=steam_name\n )\n return\n else:\n raise ValueError(\"Invalid multiplexed **frame received (no channel/payload key)\")", - "docstring": "Rout the message down the correct stream." 
- }, - { - "code": "def _run(self):\n if self._has_run:\n raise RuntimeError(\"This spreader instance has already been run: \"\n \"create a new Spreader object for a new run.\")\n i = 1\n while self.event_heap.size() > 0 and len(self._uninfected_stops) > 0:\n event = self.event_heap.pop_next_event()\n this_stop = self._stop_I_to_spreading_stop[event.from_stop_I]\n if event.arr_time_ut > self.start_time_ut + self.max_duration_ut:\n break\n if this_stop.can_infect(event):\n target_stop = self._stop_I_to_spreading_stop[event.to_stop_I]\n already_visited = target_stop.has_been_visited()\n target_stop.visit(event)\n if not already_visited:\n self._uninfected_stops.remove(event.to_stop_I)\n print(i, self.event_heap.size())\n transfer_distances = self.gtfs.get_straight_line_transfer_distances(event.to_stop_I)\n self.event_heap.add_walk_events_to_heap(transfer_distances, event, self.start_time_ut,\n self.walk_speed, self._uninfected_stops,\n self.max_duration_ut)\n i += 1\n self._has_run = True", - "docstring": "Run the actual simulation." - }, - { - "code": "def send_zip(self, exercise, file, params):\n resp = self.post(\n exercise.return_url,\n params=params,\n files={\n \"submission[file]\": ('submission.zip', file)\n },\n data={\n \"commit\": \"Submit\"\n }\n )\n return self._to_json(resp)", - "docstring": "Send zipfile to TMC for given exercise" - }, - { - "code": "def calc_global_bbox(self, view_matrix, bbox_min, bbox_max):\n if self.matrix is not None:\n view_matrix = matrix44.multiply(self.matrix, view_matrix)\n if self.mesh:\n bbox_min, bbox_max = self.mesh.calc_global_bbox(view_matrix, bbox_min, bbox_max)\n for child in self.children:\n bbox_min, bbox_max = child.calc_global_bbox(view_matrix, bbox_min, bbox_max)\n return bbox_min, bbox_max", - "docstring": "Recursive calculation of scene bbox" - }, - { - "code": "def add_extension(module, name, code):\n code = int(code)\n if not 1 <= code <= 0x7fffffff:\n raise ValueError, \"code out of range\"\n key = (module, name)\n if (_extension_registry.get(key) == code and\n _inverted_registry.get(code) == key):\n return\n if key in _extension_registry:\n raise ValueError(\"key %s is already registered with code %s\" %\n (key, _extension_registry[key]))\n if code in _inverted_registry:\n raise ValueError(\"code %s is already in use for key %s\" %\n (code, _inverted_registry[code]))\n _extension_registry[key] = code\n _inverted_registry[code] = key", - "docstring": "Register an extension code." 
- }, - { - "code": "def parse_cmdline_kwargs(args):\n def parse(v):\n assert isinstance(v, str)\n try:\n return eval(v)\n except (NameError, SyntaxError):\n return v\n return {k: parse(v) for k,v in parse_unknown_args(args).items()}", - "docstring": "convert a list of '='-spaced command-line arguments to a dictionary, evaluating python objects when possible" - }, - { - "code": "def write_var_data(fd, data):\n fd.write(struct.pack('b3xI', etypes['miMATRIX']['n'], len(data)))\n fd.write(data)", - "docstring": "Write variable data to file" - }, - { - "code": "def _thumbnail_div(full_dir, fname, snippet, is_backref=False):\n thumb = os.path.join(full_dir, 'images', 'thumb',\n 'sphx_glr_%s_thumb.png' % fname[:-3])\n ref_name = os.path.join(full_dir, fname).replace(os.path.sep, '_')\n template = BACKREF_THUMBNAIL_TEMPLATE if is_backref else THUMBNAIL_TEMPLATE\n return template.format(snippet=snippet, thumbnail=thumb, ref_name=ref_name)", - "docstring": "Generates RST to place a thumbnail in a gallery" - }, - { - "code": "def set(self, model, value):\n self.validate(value)\n self._pop(model)\n value = self.serialize(value)\n model.tags.append(value)", - "docstring": "Set tag on model object." - }, - { - "code": "def _start_processes(self, commands):\n Log.info(\"Start processes\")\n processes_to_monitor = {}\n for (name, command) in commands.items():\n p = self._run_process(name, command)\n processes_to_monitor[p.pid] = ProcessInfo(p, name, command)\n log_pid_for_process(name, p.pid)\n with self.process_lock:\n self.processes_to_monitor.update(processes_to_monitor)", - "docstring": "Start all commands and add them to the dict of processes to be monitored" - }, - { - "code": "def make_vec_env(env_id, env_type, num_env, seed,\n wrapper_kwargs=None,\n start_index=0,\n reward_scale=1.0,\n flatten_dict_observations=True,\n gamestate=None):\n wrapper_kwargs = wrapper_kwargs or {}\n mpi_rank = MPI.COMM_WORLD.Get_rank() if MPI else 0\n seed = seed + 10000 * mpi_rank if seed is not None else None\n logger_dir = logger.get_dir()\n def make_thunk(rank):\n return lambda: make_env(\n env_id=env_id,\n env_type=env_type,\n mpi_rank=mpi_rank,\n subrank=rank,\n seed=seed,\n reward_scale=reward_scale,\n gamestate=gamestate,\n flatten_dict_observations=flatten_dict_observations,\n wrapper_kwargs=wrapper_kwargs,\n logger_dir=logger_dir\n )\n set_global_seeds(seed)\n if num_env > 1:\n return SubprocVecEnv([make_thunk(i + start_index) for i in range(num_env)])\n else:\n return DummyVecEnv([make_thunk(start_index)])", - "docstring": "Create a wrapped, monitored SubprocVecEnv for Atari and MuJoCo." - }, - { - "code": "def to_value(cls, instance):\n if not isinstance(instance, OctaveUserClass) or not instance._attrs:\n return dict()\n dtype = []\n values = []\n for attr in instance._attrs:\n dtype.append((str(attr), object))\n values.append(getattr(instance, attr))\n struct = np.array([tuple(values)], dtype)\n return MatlabObject(struct, instance._name)", - "docstring": "Convert to a value to send to Octave." 
- }, - { - "code": "def default_headers(self):\n _headers = {\n \"User-Agent\": \"Pyzotero/%s\" % __version__,\n \"Zotero-API-Version\": \"%s\" % __api_version__,\n }\n if self.api_key:\n _headers[\"Authorization\"] = \"Bearer %s\" % self.api_key\n return _headers", - "docstring": "It's always OK to include these headers" - }, - { - "code": "def _err(self, msg):\n out = '%s%s' % ('[%s] ' % self.description if len(self.description) > 0 else '', msg)\n if self.kind == 'warn':\n print(out)\n return self\n elif self.kind == 'soft':\n global _soft_err\n _soft_err.append(out)\n return self\n else:\n raise AssertionError(out)", - "docstring": "Helper to raise an AssertionError, and optionally prepend custom description." - }, - { - "code": "def stop(self, pin):\n if pin not in self.pwm:\n raise ValueError('Pin {0} is not configured as a PWM. Make sure to first call start for the pin.'.format(pin))\n self.pwm[pin].stop()\n del self.pwm[pin]", - "docstring": "Stop PWM output on specified pin." - }, - { - "code": "def pickle_save(thing,fname):\n pickle.dump(thing, open(fname,\"wb\"),pickle.HIGHEST_PROTOCOL)\n return thing", - "docstring": "save something to a pickle file" - }, - { - "code": "def all(self):\n response = self.api.get(url=PATHS['GET_PROFILES'])\n for raw_profile in response:\n self.append(Profile(self.api, raw_profile))\n return self", - "docstring": "Get all social newtworks profiles" - }, - { - "code": "def vformat(self, format_string, args, kwargs):\n self._used_kwargs = {}\n self._unused_kwargs = {}\n return super(MemorizeFormatter, self).vformat(format_string, args, kwargs)", - "docstring": "Clear used and unused dicts before each formatting." - }, - { - "code": "def compute_alpha(x):\n threshold = _compute_threshold(x)\n alpha1_temp1 = tf.where(tf.greater(x, threshold), x, tf.zeros_like(x, tf.float32))\n alpha1_temp2 = tf.where(tf.less(x, -threshold), x, tf.zeros_like(x, tf.float32))\n alpha_array = tf.add(alpha1_temp1, alpha1_temp2, name=None)\n alpha_array_abs = tf.abs(alpha_array)\n alpha_array_abs1 = tf.where(\n tf.greater(alpha_array_abs, 0), tf.ones_like(alpha_array_abs, tf.float32),\n tf.zeros_like(alpha_array_abs, tf.float32)\n )\n alpha_sum = tf.reduce_sum(alpha_array_abs)\n n = tf.reduce_sum(alpha_array_abs1)\n alpha = tf.div(alpha_sum, n)\n return alpha", - "docstring": "Computing the scale parameter." - }, - { - "code": "def sort_by_size(infile, outfile, smallest_first=False):\n seqs = {}\n file_to_dict(infile, seqs)\n seqs = list(seqs.values())\n seqs.sort(key=lambda x: len(x), reverse=not smallest_first)\n fout = utils.open_file_write(outfile)\n for seq in seqs:\n print(seq, file=fout)\n utils.close(fout)", - "docstring": "Sorts input sequence file by biggest sequence first, writes sorted output file. Set smallest_first=True to have smallest first" - }, - { - "code": "def minus(*args):\n if len(args) == 1:\n return -to_numeric(args[0])\n return to_numeric(args[0]) - to_numeric(args[1])", - "docstring": "Also, converts either to ints or to floats." - }, - { - "code": "def RemoveMultiLineCommentsFromRange(lines, begin, end):\n for i in range(begin, end):\n lines[i] = '/**/'", - "docstring": "Clears a range of lines for multi-line comments." 
- }, - { - "code": "def retrieve_url(self, url):\n try:\n r = requests.get(url)\n except requests.ConnectionError:\n raise exceptions.RetrieveError('Connection fail')\n if r.status_code >= 400:\n raise exceptions.RetrieveError('Connected, but status code is %s' % (r.status_code))\n real_url = r.url\n content = r.content\n try:\n content_type = r.headers['Content-Type']\n except KeyError:\n content_type, encoding = mimetypes.guess_type(real_url, strict=False)\n self.response = r\n return content_type.lower(), content", - "docstring": "Use requests to fetch remote content" - }, - { - "code": "def read(readme):\n extend = os.path.splitext(readme)[1]\n if (extend == '.rst'):\n import codecs\n return codecs.open(readme, 'r', 'utf-8').read()\n elif (extend == '.md'):\n import pypandoc\n return pypandoc.convert(readme, 'rst')", - "docstring": "Give reST format README for pypi." - }, - { - "code": "def CALLDATALOAD(self, offset):\n if issymbolic(offset):\n if solver.can_be_true(self._constraints, offset == self._used_calldata_size):\n self.constraints.add(offset == self._used_calldata_size)\n raise ConcretizeArgument(1, policy='SAMPLED')\n self._use_calldata(offset, 32)\n data_length = len(self.data)\n bytes = []\n for i in range(32):\n try:\n c = Operators.ITEBV(8, offset + i < data_length, self.data[offset + i], 0)\n except IndexError:\n c = 0\n bytes.append(c)\n return Operators.CONCAT(256, *bytes)", - "docstring": "Get input data of current environment" - }, - { - "code": "def remove_tags(self, tags):\n return self.get_data(\n \"firewalls/%s/tags\" % self.id,\n type=DELETE,\n params={\"tags\": tags}\n )", - "docstring": "Remove tags from this Firewall." - }, - { - "code": "async def rt_unsubscribe(self):\n if self._subscription_id is None:\n _LOGGER.error(\"Not subscribed.\")\n return\n await self._tibber_control.sub_manager.unsubscribe(self._subscription_id)", - "docstring": "Unsubscribe to Tibber rt subscription." - }, - { - "code": "def valid_java_classpath(classpath):\n paths = classpath.split(':')\n for path_entry in paths:\n if not valid_path(path_entry.strip()):\n return False\n return True", - "docstring": "Given a java classpath, check whether the path entries are valid or not" - }, - { - "code": "def clear_obj(self, obj):\n obj_nodes = self.get_nodes_with(obj)\n removed = set()\n for node in obj_nodes:\n if self.has_node(node):\n removed.update(self.clear_descendants(node))\n return removed", - "docstring": "Remove all nodes with `obj` and their descendants." - }, - { - "code": "def run_shell_command(commands, **kwargs):\n p = subprocess.Popen(commands,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n **kwargs)\n output, error = p.communicate()\n return p.returncode, output, error", - "docstring": "Run a shell command." - }, - { - "code": "def register_chooser(self, chooser, **kwargs):\n if not issubclass(chooser, Chooser):\n return self.register_simple_chooser(chooser, **kwargs)\n self.choosers[chooser.model] = chooser(**kwargs)\n return chooser", - "docstring": "Adds a model chooser definition to the registry." 
- }, - { - "code": "def json_write_data(json_data, filename):\n with open(filename, 'w') as fp:\n json.dump(json_data, fp, indent=4, sort_keys=True, ensure_ascii=False)\n return True\n return False", - "docstring": "Write json data into a file" - }, - { - "code": "def schedule_play(self, call_params):\n path = '/' + self.api_version + '/SchedulePlay/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Schedule playing something on a call Helper" - }, - { - "code": "def _remove_magic(self, data):\n if not self.magic:\n return data\n magic_size = len(self.magic)\n magic = data[:magic_size]\n if magic != self.magic:\n raise Exception('Invalid magic')\n data = data[magic_size:]\n return data", - "docstring": "Verify and remove magic" - }, - { - "code": "def _dump(self, tag, x, lo, hi):\n for i in xrange(lo, hi):\n yield '%s %s' % (tag, x[i])", - "docstring": "Generate comparison results for a same-tagged range." - }, - { - "code": "def index_document(self, text, url):\n \"Index the text of a document.\"\n title = text[:text.index('\\n')].strip()\n docwords = words(text)\n docid = len(self.documents)\n self.documents.append(Document(title, url, len(docwords)))\n for word in docwords:\n if word not in self.stopwords:\n self.index[word][docid] += 1", - "docstring": "Index the text of a document." - }, - { - "code": "def message_is_to_me(self, data):\n return (data.get('type') == 'message' and\n data.get('text', '').startswith(self.address_as))", - "docstring": "If you send a message directly to me" - }, - { - "code": "def require_accessibility(self, user, method):\n if method == 'OPTIONS':\n return\n authz = self.meta.authorization\n if not authz.is_accessible(user, method, self):\n authz.unaccessible()", - "docstring": "Ensure we are allowed to access this resource." - }, - { - "code": "def import_domaindump():\n parser = argparse.ArgumentParser(\n description=\"Imports users, groups and computers result files from the ldapdomaindump tool, will resolve the names from domain_computers output for IPs\")\n parser.add_argument(\"files\", nargs='+',\n help=\"The domaindump files to import\")\n arguments = parser.parse_args()\n domain_users_file = ''\n domain_groups_file = ''\n computer_count = 0\n user_count = 0\n stats = {}\n for filename in arguments.files:\n if filename.endswith('domain_computers.json'):\n print_notification('Parsing domain computers')\n computer_count = parse_domain_computers(filename)\n if computer_count:\n stats['hosts'] = computer_count\n print_success(\"{} hosts imported\".format(computer_count))\n elif filename.endswith('domain_users.json'):\n domain_users_file = filename\n elif filename.endswith('domain_groups.json'):\n domain_groups_file = filename\n if domain_users_file:\n print_notification(\"Parsing domain users\")\n user_count = parse_domain_users(domain_users_file, domain_groups_file)\n if user_count:\n print_success(\"{} users imported\".format(user_count))\n stats['users'] = user_count\n Logger().log(\"import_domaindump\", 'Imported domaindump, found {} user, {} systems'.format(user_count, computer_count), stats)", - "docstring": "Parses ldapdomaindump files and stores hosts and users in elasticsearch." 
- }, - { - "code": "def write_to_fullarr(data, sample, sidx):\n LOGGER.info(\"writing fullarr %s %s\", sample.name, sidx)\n with h5py.File(data.clust_database, 'r+') as io5:\n chunk = io5[\"catgs\"].attrs[\"chunksize\"][0]\n catg = io5[\"catgs\"]\n nall = io5[\"nalleles\"]\n smpio = os.path.join(data.dirs.across, sample.name+'.tmp.h5')\n with h5py.File(smpio) as indat:\n newcatg = indat[\"icatg\"]\n onall = indat[\"inall\"]\n for cidx in xrange(0, catg.shape[0], chunk):\n end = cidx + chunk\n catg[cidx:end, sidx:sidx+1, :] = np.expand_dims(newcatg[cidx:end, :], axis=1)\n nall[:, sidx:sidx+1] = np.expand_dims(onall, axis=1)", - "docstring": "writes arrays to h5 disk" - }, - { - "code": "def param_changed_to(self, key, to_value, from_value=None):\n last_value = getattr(self.last_manifest, key)\n current_value = self.current_manifest.get(key)\n if from_value is not None:\n return last_value == from_value and current_value == to_value\n return last_value != to_value and current_value == to_value", - "docstring": "Returns true if the given parameter, with name key, has transitioned to the given value." - }, - { - "code": "def read_header(fd, endian):\n flag_class, nzmax = read_elements(fd, endian, ['miUINT32'])\n header = {\n 'mclass': flag_class & 0x0FF,\n 'is_logical': (flag_class >> 9 & 1) == 1,\n 'is_global': (flag_class >> 10 & 1) == 1,\n 'is_complex': (flag_class >> 11 & 1) == 1,\n 'nzmax': nzmax\n }\n header['dims'] = read_elements(fd, endian, ['miINT32'])\n header['n_dims'] = len(header['dims'])\n if header['n_dims'] != 2:\n raise ParseError('Only matrices with dimension 2 are supported.')\n header['name'] = read_elements(fd, endian, ['miINT8'], is_name=True)\n return header", - "docstring": "Read and return the matrix header." - }, - { - "code": "def conference_play(self, call_params):\n path = '/' + self.api_version + '/ConferencePlay/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Conference Play helper" - }, - { - "code": "def add_walls(self):\n \"Put walls around the entire perimeter of the grid.\"\n for x in range(self.width):\n self.add_thing(Wall(), (x, 0))\n self.add_thing(Wall(), (x, self.height-1))\n for y in range(self.height):\n self.add_thing(Wall(), (0, y))\n self.add_thing(Wall(), (self.width-1, y))", - "docstring": "Put walls around the entire perimeter of the grid." - }, - { - "code": "def filtered_image(self, im):\n q = np.fft.fftn(im)\n for k,v in self.filters:\n q[k] -= v\n return np.real(np.fft.ifftn(q))", - "docstring": "Returns a filtered image after applying the Fourier-space filters" - }, - { - "code": "def _remove_header(self, data, options):\n version_info = self._get_version_info(options['version'])\n header_size = version_info['header_size']\n if options['flags']['timestamp']:\n header_size += version_info['timestamp_size']\n data = data[header_size:]\n return data", - "docstring": "Remove header from data" - }, - { - "code": "def text_rank (path):\n graph = build_graph(json_iter(path))\n ranks = nx.pagerank(graph)\n return graph, ranks", - "docstring": "run the TextRank algorithm" - }, - { - "code": "def self_edge_filter(_: BELGraph, source: BaseEntity, target: BaseEntity, __: str) -> bool:\n return source == target", - "docstring": "Check if the source and target nodes are the same." 
- }, - { - "code": "def pack_triples_numpy(triples):\n if len(triples) == 0:\n return np.array([], dtype=np.int64)\n return np.stack(list(map(_transform_triple_numpy, triples)), axis=0)", - "docstring": "Packs a list of triple indexes into a 2D numpy array." - }, - { - "code": "def main():\n services = ServiceSearch()\n argparse = services.argparser\n argparse.add_argument('-f', '--file', type=str, help=\"File\")\n arguments = argparse.parse_args()\n if not arguments.file:\n print_error(\"Please provide a file with credentials seperated by ':'\")\n sys.exit()\n services = services.get_services(search=[\"Tomcat\"], up=True, tags=['!tomcat_brute'])\n credentials = []\n with open(arguments.file, 'r') as f:\n credentials = f.readlines()\n for service in services:\n print_notification(\"Checking ip:{} port {}\".format(service.address, service.port))\n url = 'http://{}:{}/manager/html'\n gevent.spawn(brutefore_passwords, service.address, url.format(service.address, service.port), credentials, service)\n service.add_tag('tomcat_brute')\n service.update(tags=service.tags)\n gevent.wait()\n Logger().log(\"tomcat_brute\", \"Performed tomcat bruteforce scan\", {'scanned_services': len(services)})", - "docstring": "Checks the arguments to brutefore and spawns greenlets to perform the bruteforcing." - }, - { - "code": "def html_temp_launch(html):\n fname = tempfile.gettempdir()+\"/swhlab/temp.html\"\n with open(fname,'w') as f:\n f.write(html)\n webbrowser.open(fname)", - "docstring": "given text, make it a temporary HTML file and launch it." - }, - { - "code": "def putstats(pfile, handle, statdicts):\n with open(pfile, 'r') as infile:\n filestats, samplestats = pickle.load(infile)\n perfile, fsamplehits, fbarhits, fmisses, fdbars = statdicts\n perfile[handle] += filestats\n samplehits, barhits, misses, dbars = samplestats\n fsamplehits.update(samplehits)\n fbarhits.update(barhits)\n fmisses.update(misses)\n fdbars.update(dbars)\n statdicts = perfile, fsamplehits, fbarhits, fmisses, fdbars\n return statdicts", - "docstring": "puts stats from pickles into a dictionary" - }, - { - "code": "def all_package_versions(package):\n info = PyPI.package_info(package)\n return info and sorted(info['releases'].keys(), key=lambda x: x.split(), reverse=True) or []", - "docstring": "All versions for package" - }, - { - "code": "def exists(self, server):\n try:\n server.get(\n 'challenge',\n replacements={'slug': self.slug})\n except Exception:\n return False\n return True", - "docstring": "Check if a challenge exists on the server" - }, - { - "code": "def convert_nm(nm, notation=IP_DOT, inotation=IP_UNKNOWN, check=True):\n return _convert(nm, notation, inotation, _check=check, _isnm=True)", - "docstring": "Convert a netmask to another notation." - }, - { - "code": "def add_additional_args(parsers):\n for parser in parsers:\n cli_args.add_verbose(parser)\n cli_args.add_config(parser)\n parser.add_argument(\n '--heron-dir',\n default=config.get_heron_dir(),\n help='Path to Heron home directory')", - "docstring": "add additional parameters to parser" - }, - { - "code": "def transform_courserun_schedule(self, content_metadata_item):\n start = content_metadata_item.get('start') or UNIX_MIN_DATE_STRING\n end = content_metadata_item.get('end') or UNIX_MAX_DATE_STRING\n return [{\n 'startDate': parse_datetime_to_epoch_millis(start),\n 'endDate': parse_datetime_to_epoch_millis(end),\n 'active': current_time_is_in_interval(start, end)\n }]", - "docstring": "Return the schedule of the courseun content item." 
- }, - { - "code": "def _check_peptide_inputs(self, peptides):\n require_iterable_of(peptides, string_types)\n check_X = not self.allow_X_in_peptides\n check_lower = not self.allow_lowercase_in_peptides\n check_min_length = self.min_peptide_length is not None\n min_length = self.min_peptide_length\n check_max_length = self.max_peptide_length is not None\n max_length = self.max_peptide_length\n for p in peptides:\n if not p.isalpha():\n raise ValueError(\"Invalid characters in peptide '%s'\" % p)\n elif check_X and \"X\" in p:\n raise ValueError(\"Invalid character 'X' in peptide '%s'\" % p)\n elif check_lower and not p.isupper():\n raise ValueError(\"Invalid lowercase letters in peptide '%s'\" % p)\n elif check_min_length and len(p) < min_length:\n raise ValueError(\n \"Peptide '%s' too short (%d chars), must be at least %d\" % (\n p, len(p), min_length))\n elif check_max_length and len(p) > max_length:\n raise ValueError(\n \"Peptide '%s' too long (%d chars), must be at least %d\" % (\n p, len(p), max_length))", - "docstring": "Check peptide sequences to make sure they are valid for this predictor." - }, - { - "code": "def getCustomDict(cls):\n if not os.path.exists(cls.getPath()):\n return dict()\n properties = Configuration._readConfigFile(os.path.basename(\n cls.getPath()), os.path.dirname(cls.getPath()))\n values = dict()\n for propName in properties:\n if 'value' in properties[propName]:\n values[propName] = properties[propName]['value']\n return values", - "docstring": "Returns a dict of all temporary values in custom configuration file" - }, - { - "code": "def getOutputNames(self):\n outputs = self.getSpec().outputs\n return [outputs.getByIndex(i)[0] for i in xrange(outputs.getCount())]", - "docstring": "Returns list of output names in spec." - }, - { - "code": "def convert_to_G(self, word):\n value = 0.0\n if word[-1] == 'G' or word[-1] == 'g':\n value = float(word[:-1])\n elif word[-1] == 'M' or word[-1] == 'm':\n value = float(word[:-1]) / 1000.0\n elif word[-1] == 'K' or word[-1] == 'k':\n value = float(word[:-1]) / 1000.0 / 1000.0\n else:\n value = float(word) / 1000.0 / 1000.0 / 1000.0\n return str(value)", - "docstring": "Given a size such as '2333M', return the converted value in G" - }, - { - "code": "def add_arguments(parser):\n parser.add_argument('-o', '--old-environment', help='Old environment name', required=True)\n parser.add_argument('-n', '--new-environment', help='New environment name', required=True)", - "docstring": "adds arguments for the swap urls command" - }, - { - "code": "def _get_log(self, limit=None):\n self.ui.pushbuffer()\n commands.log(self.ui, self.repo, limit=limit, date=None, rev=None, user=None)\n res = self.ui.popbuffer().strip()\n logList = []\n for logentry in res.split(\"\\n\\n\"):\n log = {}\n logList.append(log)\n for line in logentry.split(\"\\n\"):\n k, v = line.split(\":\", 1)\n assert k in (\"changeset\", \"tag\", \"user\", \"date\", \"summary\")\n log[k.strip()] = v.strip()\n log[\"parsed_date\"] = util.parse_time_string(log[\"date\"])\n local_id, unid = log[\"changeset\"].split(\":\")\n log[\"local_id\"] = int(local_id)\n log[\"unid\"] = unid\n return logList", - "docstring": "Read log entries into a list of dictionaries." - }, - { - "code": "def _get_base(group, **conn):\n group['_version'] = 1\n group.update(get_group_api(group['GroupName'], users=False, **conn)['Group'])\n group['CreateDate'] = get_iso_string(group['CreateDate'])\n return group", - "docstring": "Fetch the base IAM Group." 
- }, - { - "code": "def init_app(self, app):\n app.config.setdefault(\"TRACY_REQUIRE_CLIENT\", False)\n if not hasattr(app, 'extensions'):\n app.extensions = {}\n app.extensions['restpoints'] = self\n app.before_request(self._before)\n app.after_request(self._after)", - "docstring": "Setup before_request, after_request handlers for tracing." - }, - { - "code": "def exists(self, name):\n with self.settings(hide('running', 'stdout', 'warnings'), warn_only=True):\n return self.run('getent group %(name)s' % locals()).succeeded", - "docstring": "Check if a group exists." - }, - { - "code": "def writable_path(path):\n if os.path.exists(path):\n return os.access(path, os.W_OK)\n try:\n with open(path, 'w'):\n pass\n except (OSError, IOError):\n return False\n else:\n os.remove(path)\n return True", - "docstring": "Test whether a path can be written to." - }, - { - "code": "def _check_groups(s, groups):\n ans = []\n for g in groups:\n ans.extend(g)\n if np.unique(ans).size != np.size(ans):\n return False\n elif np.unique(ans).size != s.obj_get_positions().shape[0]:\n return False\n else:\n return (np.arange(s.obj_get_radii().size) == np.sort(ans)).all()", - "docstring": "Ensures that all particles are included in exactly 1 group" - }, - { - "code": "def _boottime_linux():\n global __boottime\n try:\n f = open('/proc/stat', 'r')\n for line in f:\n if line.startswith('btime'):\n __boottime = int(line.split()[1])\n if datetime is None:\n raise NotImplementedError('datetime module required.')\n return datetime.fromtimestamp(__boottime)\n except (IOError, IndexError):\n return None", - "docstring": "A way to figure out the boot time directly on Linux." - }, - { - "code": "def LDRD(cpu, dest1, dest2, src, offset=None):\n assert dest1.type == 'register'\n assert dest2.type == 'register'\n assert src.type == 'memory'\n mem1 = cpu.read_int(src.address(), 32)\n mem2 = cpu.read_int(src.address() + 4, 32)\n writeback = cpu._compute_writeback(src, offset)\n dest1.write(mem1)\n dest2.write(mem2)\n cpu._cs_hack_ldr_str_writeback(src, offset, writeback)", - "docstring": "Loads double width data from memory." - }, - { - "code": "def as_dict(self, *args, **kwargs):\n self_as_dict = super(ClinVarAllele, self).as_dict(*args, **kwargs)\n self_as_dict['hgvs'] = self.hgvs\n self_as_dict['clnalleleid'] = self.clnalleleid\n self_as_dict['clnsig'] = self.clnsig\n self_as_dict['clndn'] = self.clndn\n self_as_dict['clndisdb'] = self.clndisdb\n self_as_dict['clnvi'] = self.clnvi\n return self_as_dict", - "docstring": "Return ClinVarAllele data as dict object." - }, - { - "code": "def prop_symbols(x):\n \"Return a list of all propositional symbols in x.\"\n if not isinstance(x, Expr):\n return []\n elif is_prop_symbol(x.op):\n return [x]\n else:\n return list(set(symbol for arg in x.args\n for symbol in prop_symbols(arg)))", - "docstring": "Return a list of all propositional symbols in x." 
- }, - { - "code": "def load_image(self):\n try:\n image = initializers.load_tiff(self.filename)\n image = initializers.normalize(\n image, invert=self.invert, scale=self.exposure,\n dtype=self.float_precision\n )\n except IOError as e:\n log.error(\"Could not find image '%s'\" % self.filename)\n raise e\n return image", - "docstring": "Read the file and perform any transforms to get a loaded image" - }, - { - "code": "def best_units(self, sequence):\n ts_range = self.value(max(sequence)) - self.value(min(sequence))\n package = self.determine_package(sequence[0])\n if package == 'pandas':\n cuts = [\n (0.9, 'us'),\n (0.9, 'ms'),\n (0.9, 's'),\n (9, 'm'),\n (6, 'h'),\n (4, 'd'),\n (4, 'w'),\n (4, 'M'),\n (3, 'y')]\n denomination = NANOSECONDS\n base_units = 'ns'\n else:\n cuts = [\n (0.9, 's'),\n (9, 'm'),\n (6, 'h'),\n (4, 'd'),\n (4, 'w'),\n (4, 'M'),\n (3, 'y')]\n denomination = SECONDS\n base_units = 'ms'\n for size, units in reversed(cuts):\n if ts_range >= size*denomination[units]:\n return units\n return base_units", - "docstring": "Determine good units for representing a sequence of timedeltas" - }, - { - "code": "def save(url, *args, **kwargs):\n device = heimdallDevice(kwargs.get('device', None))\n kwargs['width'] = kwargs.get('width', None) or device.width\n kwargs['height'] = kwargs.get('height', None) or device.height\n kwargs['user_agent'] = kwargs.get('user_agent', None) or device.user_agent\n screenshot_image = screenshot(url, **kwargs)\n if kwargs.get('optimize'):\n image = Image.open(screenshot_image.path)\n image.save(screenshot_image.path, optimize=True)\n return screenshot_image", - "docstring": "Parse the options, set defaults and then fire up PhantomJS." - }, - { - "code": "def create_project_thread(session, member_ids, project_id, message):\n return create_thread(session, member_ids, 'project', project_id, message)", - "docstring": "Create a project thread" - }, - { - "code": "def apply_filters(self, query, filters):\n assert isinstance(query, peewee.Query)\n assert isinstance(filters, dict)", - "docstring": "Apply user specified filters to query" - }, - { - "code": "def does_not_contain(self, *items):\n if len(items) == 0:\n raise ValueError('one or more args must be given')\n elif len(items) == 1:\n if items[0] in self.val:\n self._err('Expected <%s> to not contain item <%s>, but did.' % (self.val, items[0]))\n else:\n found = []\n for i in items:\n if i in self.val:\n found.append(i)\n if found:\n self._err('Expected <%s> to not contain items %s, but did contain %s.' % (self.val, self._fmt_items(items), self._fmt_items(found)))\n return self", - "docstring": "Asserts that val does not contain the given item or items." 
- }, - { - "code": "def SHA3(self, start, size):\n data = self.try_simplify_to_constant(self.read_buffer(start, size))\n if issymbolic(data):\n known_sha3 = {}\n self._publish('on_symbolic_sha3', data, known_sha3)\n value = 0\n known_hashes_cond = False\n for key, hsh in known_sha3.items():\n assert not issymbolic(key), \"Saved sha3 data,hash pairs should be concrete\"\n cond = key == data\n known_hashes_cond = Operators.OR(cond, known_hashes_cond)\n value = Operators.ITEBV(256, cond, hsh, value)\n return value\n value = sha3.keccak_256(data).hexdigest()\n value = int(value, 16)\n self._publish('on_concrete_sha3', data, value)\n logger.info(\"Found a concrete SHA3 example %r -> %x\", data, value)\n return value", - "docstring": "Compute Keccak-256 hash" - }, - { - "code": "def from_config(config, kwargs=None):\n return util.get_object(\n obj=config,\n predefined=tensorforce.core.optimizers.solvers.solvers,\n kwargs=kwargs\n )", - "docstring": "Creates a solver from a specification dict." - }, - { - "code": "def load(self):\n try:\n with open(self._state_file) as f:\n state = yaml.safe_load(f)\n self._containers = state['containers']\n except (IOError, OSError) as err:\n if err.errno == errno.ENOENT:\n raise NotInitializedError(\"No blockade exists in this context\")\n raise InconsistentStateError(\"Failed to load Blockade state: \"\n + str(err))\n except Exception as err:\n raise InconsistentStateError(\"Failed to load Blockade state: \"\n + str(err))", - "docstring": "Try to load a blockade state file in the current directory" - }, - { - "code": "def heapreplace_max(heap, item):\n returnitem = heap[0]\n heap[0] = item\n _siftup_max(heap, 0)\n return returnitem", - "docstring": "Maxheap version of a heappop followed by a heappush." - }, - { - "code": "def poll(self):\n ret = self.communicationChannel.receive_finished()\n self.nruns -= len(ret)\n return ret", - "docstring": "Return pairs of run ids and results of finish event loops." - }, - { - "code": "def formatwarning(message, category, filename, lineno, line=None):\n try:\n unicodetype = unicode\n except NameError:\n unicodetype = ()\n try:\n message = str(message)\n except UnicodeEncodeError:\n pass\n s = \"%s: %s: %s\\n\" % (lineno, category.__name__, message)\n line = linecache.getline(filename, lineno) if line is None else line\n if line:\n line = line.strip()\n if isinstance(s, unicodetype) and isinstance(line, str):\n line = unicode(line, 'latin1')\n s += \" %s\\n\" % line\n if isinstance(s, unicodetype) and isinstance(filename, str):\n enc = sys.getfilesystemencoding()\n if enc:\n try:\n filename = unicode(filename, enc)\n except UnicodeDecodeError:\n pass\n s = \"%s:%s\" % (filename, s)\n return s", - "docstring": "Function to format a warning the standard way." - }, - { - "code": "def add(self, string, start, end, line):\n if string.strip():\n self.start_lineno = min(self.start_lineno, start[0])\n self.end_lineno = max(self.end_lineno, end[0])", - "docstring": "Add lines to the block." - }, - { - "code": "def read_yaml_file(path, loader=ExtendedSafeLoader):\n with open(path) as fh:\n return load(fh, loader)", - "docstring": "Open a file, read it and return its contents." 
- }, - { - "code": "def raise_for_status(self):\n if not self.status:\n return\n error = find_exception_by_code(self.status)\n message = None\n screen = None\n stacktrace = None\n if isinstance(self.value, str):\n message = self.value\n elif isinstance(self.value, dict):\n message = self.value.get('message', None)\n screen = self.value.get('screen', None)\n stacktrace = self.value.get('stacktrace', None)\n raise WebDriverException(error, message, screen, stacktrace)", - "docstring": "Raise WebDriverException if returned status is not zero." - }, - { - "code": "def write_bel_namespace(self, file: TextIO, use_names: bool = False) -> None:\n if not self.is_populated():\n self.populate()\n if use_names and not self.has_names:\n raise ValueError\n values = (\n self._get_namespace_name_to_encoding(desc='writing names')\n if use_names else\n self._get_namespace_identifier_to_encoding(desc='writing identifiers')\n )\n write_namespace(\n namespace_name=self._get_namespace_name(),\n namespace_keyword=self._get_namespace_keyword(),\n namespace_query_url=self.identifiers_url,\n values=values,\n file=file,\n )", - "docstring": "Write as a BEL namespace file." - }, - { - "code": "def getParent(abfFname):\n child=os.path.abspath(abfFname)\n files=sorted(glob.glob(os.path.dirname(child)+\"/*.*\"))\n parentID=abfFname\n for fname in files:\n if fname.endswith(\".abf\") and fname.replace(\".abf\",\".TIF\") in files:\n parentID=os.path.basename(fname).replace(\".abf\",\"\")\n if os.path.basename(child) in fname:\n break\n return parentID", - "docstring": "given an ABF file name, return the ABF of its parent." - }, - { - "code": "def union(self, other_streamlet):\n from heronpy.streamlet.impl.unionbolt import UnionStreamlet\n union_streamlet = UnionStreamlet(self, other_streamlet)\n self._add_child(union_streamlet)\n other_streamlet._add_child(union_streamlet)\n return union_streamlet", - "docstring": "Returns a new Streamlet that consists of elements of both this and other_streamlet" - }, - { - "code": "def _init_worker(model, loopless, sense):\n global _model\n global _loopless\n _model = model\n _model.solver.objective.direction = sense\n _loopless = loopless", - "docstring": "Initialize a global model object for multiprocessing." - }, - { - "code": "def SLOAD(self, offset):\n storage_address = self.address\n self._publish('will_evm_read_storage', storage_address, offset)\n value = self.world.get_storage_data(storage_address, offset)\n self._publish('did_evm_read_storage', storage_address, offset, value)\n return value", - "docstring": "Load word from storage" - }, - { - "code": "def disableTap(self):\n if self._tapFileIn is not None:\n self._tapFileIn.close()\n self._tapFileIn = None\n if self._tapFileOut is not None:\n self._tapFileOut.close()\n self._tapFileOut = None", - "docstring": "Disable writing of output tap files." - }, - { - "code": "def SMOD(self, a, b):\n s0, s1 = to_signed(a), to_signed(b)\n sign = Operators.ITEBV(256, s0 < 0, -1, 1)\n try:\n result = (Operators.ABS(s0) % Operators.ABS(s1)) * sign\n except ZeroDivisionError:\n result = 0\n return Operators.ITEBV(256, s1 == 0, 0, result)", - "docstring": "Signed modulo remainder operation" - }, - { - "code": "def union(self, b):\r\n mx, my = min(self.x, b.x), min(self.y, b.y)\r\n return Bounds(mx, my,\r\n max(self.x+self.width, b.x+b.width) - mx,\r\n max(self.y+self.height, b.y+b.height) - my)", - "docstring": "Returns bounds that encompass the union of the two." 
- }, - { - "code": "def context(self):\n if not self._context:\n self._context = context.get_admin_context()\n return self._context", - "docstring": "Provides an admin context for workers." - }, - { - "code": "def lenv(self):\n _env = type(env)()\n for _k, _v in six.iteritems(env):\n if _k.startswith(self.name+'_'):\n _env[_k[len(self.name)+1:]] = _v\n return _env", - "docstring": "Returns a version of env filtered to only include the variables in our namespace." - }, - { - "code": "def remove(self, doc_type, doc_ids, **kwargs):\n try:\n actions = []\n for doc_id in doc_ids:\n log.debug(\"Removing document of type %s and index %s\", doc_type, doc_id)\n action = {\n '_op_type': 'delete',\n \"_index\": self.index_name,\n \"_type\": doc_type,\n \"_id\": doc_id\n }\n actions.append(action)\n bulk(self._es, actions, **kwargs)\n except BulkIndexError as ex:\n valid_errors = [error for error in ex.errors if error['delete']['status'] != 404]\n if valid_errors:\n log.exception(\"An error occurred while removing documents from the index.\")\n raise", - "docstring": "Implements call to remove the documents from the index" - }, - { - "code": "def _should_run(self, iteration, max_iterations):\n if iteration == 0:\n return True\n if max_iterations:\n if iteration < max_iterations:\n return True\n elif max_iterations is None:\n if self._dynamic:\n return True\n else:\n return False\n return True\n if not self._dynamic:\n return False\n return False", - "docstring": "Return False if bot should quit" - }, - { - "code": "def verbose(cls, key=False, default=''):\n if key is False:\n items = cls._item_dict.values()\n return [(x.key, x.value) for x in sorted(items, key=lambda x:x.sort or x.key)]\n item = cls._item_dict.get(key)\n return item.value if item else default", - "docstring": "Returns the verbose name for a given enum value" - }, - { - "code": "def rewind(self):\n super(FileRecordStream, self).rewind()\n self.close()\n self._file = open(self._filename, self._mode)\n self._reader = csv.reader(self._file, dialect=\"excel\")\n self._reader.next()\n self._reader.next()\n self._reader.next()\n self._recordCount = 0", - "docstring": "Put us back at the beginning of the file again." - }, - { - "code": "def _colorize(output):\n if not pygments:\n return output\n return pygments.highlight(output,\n pygments.lexers.PythonLexer(),\n pygments.formatters.Terminal256Formatter(style='monokai'))", - "docstring": "Return `output` colorized with Pygments, if available." - }, - { - "code": "def param_particle_rad(self, ind):\n ind = self._vps(listify(ind))\n return [self._i2p(i, 'a') for i in ind]", - "docstring": "Get radius of one or more particles" - }, - { - "code": "def _lastRecursiveChild(self):\n \"Finds the last element beneath this object to be parsed.\"\n lastChild = self\n while hasattr(lastChild, 'contents') and lastChild.contents:\n lastChild = lastChild.contents[-1]\n return lastChild", - "docstring": "Finds the last element beneath this object to be parsed." 
- }, - { - "code": "def includeme(config):\n api_key_authn_policy = APIKeyAuthenticationPolicy()\n config.include('openstax_accounts')\n openstax_authn_policy = config.registry.getUtility(\n IOpenstaxAccountsAuthenticationPolicy)\n policies = [api_key_authn_policy, openstax_authn_policy]\n authn_policy = MultiAuthenticationPolicy(policies)\n config.set_authentication_policy(authn_policy)\n authz_policy = ACLAuthorizationPolicy()\n config.set_authorization_policy(authz_policy)", - "docstring": "Configuration include fuction for this module" - }, - { - "code": "def print_boggle(board):\n \"Print the board in a 2-d array.\"\n n2 = len(board); n = exact_sqrt(n2)\n for i in range(n2):\n if i % n == 0 and i > 0: print\n if board[i] == 'Q': print 'Qu',\n else: print str(board[i]) + ' ',\n print", - "docstring": "Print the board in a 2-d array." - }, - { - "code": "def transform_args(self,*args,**kwargs):\n newargs = self._combineargs(*args, **kwargs)\n return self._build_arg_list(**newargs)", - "docstring": "Combine arguments and turn them into gromacs tool arguments." - }, - { - "code": "def handle_err(self):\n with self.lock:\n if self._state == 'connecting' and self._dst_addrs:\n self._hup = False\n self._set_state(\"connect\")\n return\n self._socket.close()\n self._socket = None\n self._set_state(\"aborted\")\n self._write_queue.clear()\n self._write_queue_cond.notify()\n raise PyXMPPIOError(\"Unhandled error on socket\")", - "docstring": "Handle an error reported." - }, - { - "code": "def TK_message(title,msg):\n root = tkinter.Tk()\n root.withdraw()\n root.attributes(\"-topmost\", True)\n root.lift()\n tkinter.messagebox.showwarning(title, msg)\n root.destroy()", - "docstring": "use the GUI to pop up a message." - }, - { - "code": "def _addRecordToKNN(self, record):\n knn = self._knnclassifier._knn\n prototype_idx = self._knnclassifier.getParameter('categoryRecencyList')\n category = self._labelListToCategoryNumber(record.anomalyLabel)\n if record.ROWID in prototype_idx:\n knn.prototypeSetCategory(record.ROWID, category)\n return\n pattern = self._getStateAnomalyVector(record)\n rowID = record.ROWID\n knn.learn(pattern, category, rowID=rowID)", - "docstring": "Adds the record to the KNN classifier." - }, - { - "code": "def _fly(self, board, layers, things, the_plot):\n if (self.character in the_plot['bunker_hitters'] or\n self.character in the_plot['marauder_hitters']):\n return self._teleport((-1, -1))\n self._north(board, the_plot)", - "docstring": "Handles the behaviour of visible bolts flying toward Marauders." - }, - { - "code": "def check_sla(self, sla, diff_metric):\n try:\n if sla.display is '%':\n diff_val = float(diff_metric['percent_diff'])\n else:\n diff_val = float(diff_metric['absolute_diff'])\n except ValueError:\n return False\n if not (sla.check_sla_passed(diff_val)):\n self.sla_failures += 1\n self.sla_failure_list.append(DiffSLAFailure(sla, diff_metric))\n return True", - "docstring": "Check whether the SLA has passed or failed" - }, - { - "code": "def path_required(func):\n @wraps(func)\n def wrapper(self, *args, **kwargs):\n if self.path is None:\n warnings.warn('Must load (Repository.load_repository) or initialize (Repository.create_repository) the repository first !')\n return\n return func(self, *args, **kwargs)\n return wrapper", - "docstring": "Decorate methods when repository path is required." 
- }, - { - "code": "def abort(self):\n self.mutex.release()\n self.turnstile.release()\n self.mutex.release()\n self.turnstile2.release()", - "docstring": "ensure the master exit from Barrier" - }, - { - "code": "def error(self, fail=True, action=''):\n e = 'There was an unknown error communicating with the device.'\n if action:\n e = 'While %s: %s' % (action, e)\n log.error(e)\n if fail:\n raise IOError(e)", - "docstring": "SHOULD BE PRIVATE METHOD" - }, - { - "code": "def gmx_resid(self, resid):\n try:\n gmx_resid = int(self.offset[resid])\n except (TypeError, IndexError):\n gmx_resid = resid + self.offset\n except KeyError:\n raise KeyError(\"offset must be a dict that contains the gmx resid for {0:d}\".format(resid))\n return gmx_resid", - "docstring": "Returns resid in the Gromacs index by transforming with offset." - }, - { - "code": "def print_big_dir(self, top_n=5):\n self.assert_is_dir_and_exists()\n size_table = sorted(\n [(p, p.dirsize) for p in self.select_dir(recursive=False)],\n key=lambda x: x[1],\n reverse=True,\n )\n for p, size in size_table[:top_n]:\n print(\"{:<9} {:<9}\".format(repr_data_size(size), p.abspath))", - "docstring": "Print ``top_n`` big dir in this dir." - }, - { - "code": "def unlock(self):\n self.init()\n r = self.local_renderer\n if self.file_exists(r.env.lockfile_path):\n self.vprint('Unlocking %s.' % r.env.lockfile_path)\n r.run_or_local('rm -f {lockfile_path}')", - "docstring": "Unmarks the remote server as currently being deployed to." - }, - { - "code": "def rt_subscription_running(self):\n return (\n self._tibber_control.sub_manager is not None\n and self._tibber_control.sub_manager.is_running\n and self._subscription_id is not None\n )", - "docstring": "Is real time subscription running." - }, - { - "code": "def emph(txt, rval=None):\n if rval is None:\n info(txt)\n elif rval == 0:\n good(txt)\n else:\n err(txt)", - "docstring": "Print, emphasized based on rval" - }, - { - "code": "def prepare_attrib_mapping(self, primitive):\n buffer_info = []\n for name, accessor in primitive.attributes.items():\n info = VBOInfo(*accessor.info())\n info.attributes.append((name, info.components))\n if buffer_info and buffer_info[-1].buffer_view == info.buffer_view:\n if buffer_info[-1].interleaves(info):\n buffer_info[-1].merge(info)\n continue\n buffer_info.append(info)\n return buffer_info", - "docstring": "Pre-parse buffer mappings for each VBO to detect interleaved data for a primitive" - }, - { - "code": "def _do_unzip(zipped_file, output_directory):\n z = zipfile.ZipFile(zipped_file)\n for path in z.namelist():\n relative_path = os.path.join(output_directory, path)\n dirname, dummy = os.path.split(relative_path)\n try:\n if relative_path.endswith(os.sep) and not os.path.exists(dirname):\n os.makedirs(relative_path)\n elif not os.path.exists(relative_path):\n dirname = os.path.join(output_directory, os.path.dirname(path))\n if os.path.dirname(path) and not os.path.exists(dirname):\n os.makedirs(dirname)\n fd = open(relative_path, \"w\")\n fd.write(z.read(path))\n fd.close()\n except IOError, e:\n raise e\n return output_directory", - "docstring": "Perform the actual uncompression." 
- }, - { - "code": "def result_files(self):\n reps = OPJ(self.workdir, self.name+\"-K-*-rep-*_f\")\n repfiles = glob.glob(reps)\n return repfiles", - "docstring": "returns a list of files that have finished structure" - }, - { - "code": "def deserialize_organization(organization_dict):\n return models.Organization(\n id=organization_dict.get('id'),\n name=organization_dict.get('name', ''),\n short_name=organization_dict.get('short_name', ''),\n description=organization_dict.get('description', ''),\n logo=organization_dict.get('logo', '')\n )", - "docstring": "Organization dict-to-object serialization" - }, - { - "code": "def command(self):\n cmd = self.config.get('command', None)\n if cmd is None:\n return\n cmd = cmd[platform]\n return cmd['path'], cmd['args']", - "docstring": "Command used to launch this application module" - }, - { - "code": "def translate_buffer_format(vertex_format):\n buffer_format = []\n attributes = []\n mesh_attributes = []\n if \"T2F\" in vertex_format:\n buffer_format.append(\"2f\")\n attributes.append(\"in_uv\")\n mesh_attributes.append((\"TEXCOORD_0\", \"in_uv\", 2))\n if \"C3F\" in vertex_format:\n buffer_format.append(\"3f\")\n attributes.append(\"in_color\")\n mesh_attributes.append((\"NORMAL\", \"in_color\", 3))\n if \"N3F\" in vertex_format:\n buffer_format.append(\"3f\")\n attributes.append(\"in_normal\")\n mesh_attributes.append((\"NORMAL\", \"in_normal\", 3))\n buffer_format.append(\"3f\")\n attributes.append(\"in_position\")\n mesh_attributes.append((\"POSITION\", \"in_position\", 3))\n return \" \".join(buffer_format), attributes, mesh_attributes", - "docstring": "Translate the buffer format" - }, - { - "code": "def _underscore_to_camelcase(value):\n def camelcase():\n yield str.lower\n while True:\n yield str.capitalize\n c = camelcase()\n return \"\".join(next(c)(x) if x else '_' for x in value.split(\"_\"))", - "docstring": "Convert Python snake case back to mixed case." - }, - { - "code": "def _wildcard_to_dec(nm, check=False):\n if check and not is_wildcard_nm(nm):\n raise ValueError('_wildcard_to_dec: invalid netmask: \"%s\"' % nm)\n return 0xFFFFFFFF - _dot_to_dec(nm, check=False)", - "docstring": "Wildcard bits to decimal conversion." - }, - { - "code": "def require_template_debug(f):\n def _(*args, **kwargs):\n TEMPLATE_DEBUG = getattr(settings, 'TEMPLATE_DEBUG', False)\n return f(*args, **kwargs) if TEMPLATE_DEBUG else ''\n return _", - "docstring": "Decorated function is a no-op if TEMPLATE_DEBUG is False" - }, - { - "code": "def ensure_specifier_exists(db_spec):\n local_match = LOCAL_RE.match(db_spec)\n remote_match = REMOTE_RE.match(db_spec)\n plain_match = PLAIN_RE.match(db_spec)\n if local_match:\n db_name = local_match.groupdict().get('database')\n server = shortcuts.get_server()\n if db_name not in server:\n server.create(db_name)\n return True\n elif remote_match:\n hostname, portnum, database = map(remote_match.groupdict().get,\n ('hostname', 'portnum', 'database'))\n server = shortcuts.get_server(\n server_url=('http://%s:%s' % (hostname, portnum)))\n if database not in server:\n server.create(database)\n return True\n elif plain_match:\n db_name = plain_match.groupdict().get('database')\n server = shortcuts.get_server()\n if db_name not in server:\n server.create(db_name)\n return True\n return False", - "docstring": "Make sure a DB specifier exists, creating it if necessary." 
- }, - { - "code": "def base_boxes(self):\n return sorted(list(set([name for name, provider in self._box_list()])))", - "docstring": "Get the list of vagrant base boxes" - }, - { - "code": "def parser(self):\n module = self.module\n subcommands = self.subcommands\n if subcommands:\n module_desc = inspect.getdoc(module)\n parser = Parser(description=module_desc, module=module)\n subparsers = parser.add_subparsers()\n for sc_name, callback in subcommands.items():\n sc_name = sc_name.replace(\"_\", \"-\")\n cb_desc = inspect.getdoc(callback)\n sc_parser = subparsers.add_parser(\n sc_name,\n callback=callback,\n help=cb_desc\n )\n else:\n parser = Parser(callback=self.callbacks[self.function_name], module=module)\n return parser", - "docstring": "return the parser for the current name" - }, - { - "code": "def one(nodes, or_none=False):\n if not nodes and or_none:\n return None\n assert len(\n nodes) == 1, 'Expected 1 result. Received %d results.' % (len(nodes))\n return nodes[0]", - "docstring": "Assert that there is exactly one node in the give list, and return it." - }, - { - "code": "def name(object):\n \"Try to find some reasonable name for the object.\"\n return (getattr(object, 'name', 0) or getattr(object, '__name__', 0)\n or getattr(getattr(object, '__class__', 0), '__name__', 0)\n or str(object))", - "docstring": "Try to find some reasonable name for the object." - }, - { - "code": "def _load_class(class_path, default):\n if class_path is None:\n return default\n component = class_path.rsplit('.', 1)\n result_processor = getattr(\n importlib.import_module(component[0]),\n component[1],\n default\n ) if len(component) > 1 else default\n return result_processor", - "docstring": "Loads the class from the class_path string" - }, - { - "code": "def map_memory_callback(self, address, size, perms, name, offset, result):\n logger.info(' '.join((\"Mapping Memory @\",\n hex(address) if type(address) is int else \"0x??\",\n hr_size(size), \"-\",\n perms, \"-\",\n f\"{name}:{hex(offset) if name else ''}\", \"->\",\n hex(result))))\n self._emu.mem_map(address, size, convert_permissions(perms))\n self.copy_memory(address, size)", - "docstring": "Catches did_map_memory and copies the mapping into Manticore" - }, - { - "code": "def _inactivate_organization_course_relationship(relationship):\n relationship = internal.OrganizationCourse.objects.get(\n id=relationship.id,\n active=True\n )\n _inactivate_record(relationship)", - "docstring": "Inactivates an active organization-course relationship" - }, - { - "code": "def maybe_decode_header(header):\n value, encoding = decode_header(header)[0]\n if encoding:\n return value.decode(encoding)\n else:\n return value", - "docstring": "Decodes an encoded 7-bit ASCII header value into it's actual value." - }, - { - "code": "def start(self):\n self.__thread = Thread(target=self.__run, args=(True, False))\n self.__thread.setDaemon(True)\n self.__thread.start()", - "docstring": "Start DMESG job in thread" - }, - { - "code": "def word_to_id(self, word):\n if word in self.vocab:\n return self.vocab[word]\n else:\n return self.unk_id", - "docstring": "Returns the integer word id of a word string." 
- }, - { - "code": "def expand_words(self, line, width=60):\n if not line.strip():\n return line\n wordi = 1\n while len(strip_codes(line)) < width:\n wordendi = self.find_word_end(line, wordi)\n if wordendi < 0:\n wordi = 1\n wordendi = self.find_word_end(line, wordi)\n if wordendi < 0:\n line = ''.join((' ', line))\n else:\n line = ' '.join((line[:wordendi], line[wordendi:]))\n wordi += 1\n if ' ' not in strip_codes(line).strip():\n return line.replace(' ', '')\n return line", - "docstring": "Insert spaces between words until it is wide enough for `width`." - }, - { - "code": "def conference_hangup(self, call_params):\n path = '/' + self.api_version + '/ConferenceHangup/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Conference Hangup helper" - }, - { - "code": "def write_extracted_licenses(lics, out):\n write_value('LicenseID', lics.identifier, out)\n if lics.full_name is not None:\n write_value('LicenseName', lics.full_name, out)\n if lics.comment is not None:\n write_text_value('LicenseComment', lics.comment, out)\n for xref in sorted(lics.cross_ref):\n write_value('LicenseCrossReference', xref, out)\n write_text_value('ExtractedText', lics.text, out)", - "docstring": "Write extracted licenses fields to out." - }, - { - "code": "def _parse_allele_data(self):\n pref_freq, frequencies = self._parse_frequencies()\n info_clnvar_single_tags = ['ALLELEID', 'CLNSIG', 'CLNHGVS']\n cln_data = {x.lower(): self.info[x] if x in self.info else None\n for x in info_clnvar_single_tags}\n cln_data.update(\n {'clndisdb': [x.split(',') for x in\n self.info['CLNDISDB'].split('|')]\n if 'CLNDISDB' in self.info else []})\n cln_data.update({'clndn': self.info['CLNDN'].split('|') if\n 'CLNDN' in self.info else []})\n cln_data.update({'clnvi': self.info['CLNVI'].split(',')\n if 'CLNVI' in self.info else []})\n try:\n sequence = self.alt_alleles[0]\n except IndexError:\n sequence = self.ref_allele\n allele = ClinVarAllele(frequency=pref_freq, sequence=sequence,\n **cln_data)\n if not cln_data['clnsig']:\n return []\n return [allele]", - "docstring": "Parse alleles for ClinVar VCF, overrides parent method." - }, - { - "code": "def weekday(when, weekday, start=mon):\n if isinstance(when, datetime):\n when = when.date()\n today = when.weekday()\n delta = weekday - today\n if weekday < start and today >= start:\n delta += 7\n elif weekday >= start and today < start:\n delta -= 7\n return when + timedelta(days=delta)", - "docstring": "Return the date for the day of this week." - }, - { - "code": "def _configure_users(self, site=None, full=0, only_data=0):\n site = site or ALL\n full = int(full)\n if full and not only_data:\n packager = self.get_satchel('packager')\n packager.install_required(type=SYSTEM, service=self.name)\n r = self.local_renderer\n params = self.get_user_vhosts(site=site)\n with settings(warn_only=True):\n self.add_admin_user()\n params = sorted(list(params))\n if not only_data:\n for user, password, vhost in params:\n r.env.broker_user = user\n r.env.broker_password = password\n r.env.broker_vhost = vhost\n with settings(warn_only=True):\n r.sudo('rabbitmqctl add_user {broker_user} {broker_password}')\n r.sudo('rabbitmqctl add_vhost {broker_vhost}')\n r.sudo('rabbitmqctl set_permissions -p {broker_vhost} {broker_user} \".*\" \".*\" \".*\"')\n r.sudo('rabbitmqctl set_permissions -p {broker_vhost} {admin_username} \".*\" \".*\" \".*\"')\n return params", - "docstring": "Installs and configures RabbitMQ." 
- }, - { - "code": "def detect(self):\n self.log.info(\"initializing AP detection on all sweeps...\")\n t1=cm.timeit()\n for sweep in range(self.abf.sweeps):\n self.detectSweep(sweep)\n self.log.info(\"AP analysis of %d sweeps found %d APs (completed in %s)\",\n self.abf.sweeps,len(self.APs),cm.timeit(t1))", - "docstring": "runs AP detection on every sweep." - }, - { - "code": "def snippets(self, timeout=None):\n return self._api_request(\n self.SNIPPETS_ENDPOINT,\n self.HTTP_GET,\n timeout=timeout\n )", - "docstring": "API call to get list of snippets" - }, - { - "code": "def histogram_stretch(self, use_bands, **kwargs):\n data = self._read(self[use_bands,...], **kwargs)\n data = np.rollaxis(data.astype(np.float32), 0, 3)\n return self._histogram_stretch(data, **kwargs)", - "docstring": "entry point for contrast stretching" - }, - { - "code": "def exists(self):\n if not isinstance(self.val, str_types):\n raise TypeError('val is not a path')\n if not os.path.exists(self.val):\n self._err('Expected <%s> to exist, but was not found.' % self.val)\n return self", - "docstring": "Asserts that val is a path and that it exists." - }, - { - "code": "def verify_edx_resources():\n required_methods = {\n 'ProgramDataExtender': ProgramDataExtender,\n }\n for method in required_methods:\n if required_methods[method] is None:\n raise NotConnectedToOpenEdX(\n _(\"The following method from the Open edX platform is necessary for this view but isn't available.\")\n + \"\\nUnavailable: {method}\".format(method=method)\n )", - "docstring": "Ensure that all necessary resources to render the view are present." - }, - { - "code": "def _print_tree(self):\n objects = self._bluez.GetManagedObjects()\n for path in objects.keys():\n print(\"[ %s ]\" % (path))\n interfaces = objects[path]\n for interface in interfaces.keys():\n if interface in [\"org.freedesktop.DBus.Introspectable\",\n \"org.freedesktop.DBus.Properties\"]:\n continue\n print(\" %s\" % (interface))\n properties = interfaces[interface]\n for key in properties.keys():\n print(\" %s = %s\" % (key, properties[key]))", - "docstring": "Print tree of all bluez objects, useful for debugging." - }, - { - "code": "def settings_and_attributes(self):\n attrs = self.setting_values()\n attrs.update(self.__dict__)\n skip = [\"_instance_settings\", \"aliases\"]\n for a in skip:\n del attrs[a]\n return attrs", - "docstring": "Return a combined dictionary of setting values and attribute values." - }, - { - "code": "def _fmt_args_kwargs(self, *some_args, **some_kwargs):\n if some_args:\n out_args = str(some_args).lstrip('(').rstrip(',)')\n if some_kwargs:\n out_kwargs = ', '.join([str(i).lstrip('(').rstrip(')').replace(', ',': ') for i in [\n (k,some_kwargs[k]) for k in sorted(some_kwargs.keys())]])\n if some_args and some_kwargs:\n return out_args + ', ' + out_kwargs\n elif some_args:\n return out_args\n elif some_kwargs:\n return out_kwargs\n else:\n return ''", - "docstring": "Helper to convert the given args and kwargs into a string." - }, - { - "code": "def update(self, server):\n for chunk in self.__cut_to_size():\n server.put(\n 'tasks_admin',\n chunk.as_payload(),\n replacements={\n 'slug': chunk.challenge.slug})", - "docstring": "Update existing tasks on the server" - }, - { - "code": "def require_http_allowed_method(cls, request):\n allowed = cls.meta.http_allowed_methods\n if request.method not in allowed:\n raise http.exceptions.MethodNotAllowed(allowed)", - "docstring": "Ensure that we're allowed to use this HTTP method." 
- }, - { - "code": "def _dec_to_dot(ip):\n first = int((ip >> 24) & 255)\n second = int((ip >> 16) & 255)\n third = int((ip >> 8) & 255)\n fourth = int(ip & 255)\n return '%d.%d.%d.%d' % (first, second, third, fourth)", - "docstring": "Decimal to dotted decimal notation conversion." - }, - { - "code": "def quick_doc(request_data):\n code = request_data['code']\n line = request_data['line'] + 1\n column = request_data['column']\n path = request_data['path']\n encoding = 'utf-8'\n script = jedi.Script(code, line, column, path, encoding)\n try:\n definitions = script.goto_definitions()\n except jedi.NotFoundError:\n return []\n else:\n ret_val = [d.docstring() for d in definitions]\n return ret_val", - "docstring": "Worker that returns the documentation of the symbol under cursor." - }, - { - "code": "def sample(self):\n \"Return a random sample from the distribution.\"\n if self.sampler is None:\n self.sampler = weighted_sampler(self.dictionary.keys(),\n self.dictionary.values())\n return self.sampler()", - "docstring": "Return a random sample from the distribution." - }, - { - "code": "def codestr2rst(codestr, lang='python'):\n code_directive = \"\\n.. code-block:: {0}\\n\\n\".format(lang)\n indented_block = indent(codestr, ' ' * 4)\n return code_directive + indented_block", - "docstring": "Return reStructuredText code block from code string" - }, - { - "code": "def _matchOther(self, obj, **kwargs):\n if obj is not None:\n if self._findFirstR(**kwargs):\n return obj._match(**kwargs)\n return False", - "docstring": "Perform _match but on another object, not self." - }, - { - "code": "def clean_notify(self):\n return self.cleaned_data.get(self.Fields.NOTIFY, self.NotificationTypes.DEFAULT)", - "docstring": "Clean the notify_on_enrollment field." - }, - { - "code": "def period(self):\n start_time = self.root.findtext('daily_start_time')\n if start_time:\n return Period(text_to_time(start_time), text_to_time(self.root.findtext('daily_end_time')))\n return Period(datetime.time(0, 0), datetime.time(23, 59))", - "docstring": "A Period tuple representing the daily start and end time." - }, - { - "code": "def commit(self, *args, **kwargs):\n return super(Deposit, self).commit(*args, **kwargs)", - "docstring": "Store changes on current instance in database and index it." - }, - { - "code": "def call(self, this, args=()):\n if self.is_native:\n _args = SpaceTuple(\n args\n )\n _args.space = self.space\n return self.code(\n this, _args\n )\n else:\n return self.space.exe._call(self, this,\n args)", - "docstring": "Dont use this method from inside bytecode to call other bytecode." - }, - { - "code": "def to_bool(option,value):\n if type(value) is str:\n if value.lower() == 'true':\n value=True\n elif value.lower() == 'false':\n value=False\n return (option,value)", - "docstring": "Converts string values to booleans when appropriate" - }, - { - "code": "def _citation_processor(self, retrieved):\n items = []\n for cit in retrieved.entries:\n items.append(cit[\"content\"][0][\"value\"])\n self.url_params = None\n return items", - "docstring": "Return a list of strings formatted as HTML citation entries" - }, - { - "code": "def _format_range_dt(self, d):\n if not isinstance(d, six.string_types):\n d = d.isoformat()\n return '{0}||/{1}'.format(\n d, self.dt_rounding_map[self.aggregation_interval])", - "docstring": "Format range filter datetime to the closest aggregation interval." 
- }, - { - "code": "def _draw(self):\n if self.display:\n print(self._formatstr.format(**self.__dict__), end='')\n sys.stdout.flush()", - "docstring": "Interal draw method, simply prints to screen" - }, - { - "code": "def digest_auth_user(self, realm, user_name, environ):\n user = self._get_realm_entry(realm, user_name)\n if user is None:\n return False\n password = user.get(\"password\")\n environ[\"wsgidav.auth.roles\"] = user.get(\"roles\", [])\n return self._compute_http_digest_a1(realm, user_name, password)", - "docstring": "Computes digest hash A1 part." - }, - { - "code": "def swarm(self, x, y, r=100):\n sc = _ctx.stroke(0, 0, 0, 0)\n sw = _ctx.strokewidth(0)\n _ctx.push()\n _ctx.transform(_ctx.CORNER)\n _ctx.translate(x, y)\n for i in _range(r * 3):\n clr = choice(self).copy()\n clr.alpha -= 0.5 * random()\n _ctx.fill(clr)\n clr = choice(self)\n _ctx.stroke(clr)\n _ctx.strokewidth(10 * random())\n _ctx.rotate(360 * random())\n r2 = r * 0.5 * random()\n _ctx.oval(r * random(), 0, r2, r2)\n _ctx.pop()\n _ctx.strokewidth(sw)\n if sc is None:\n _ctx.nostroke()\n else:\n _ctx.stroke(sc)", - "docstring": "Fancy random ovals for all the colors in the list." - }, - { - "code": "def __recursive_transform(self, jam, steps):\n if len(steps) > 0:\n head_transformer = steps[0][1]\n for t_jam in head_transformer.transform(jam):\n for q in self.__recursive_transform(t_jam, steps[1:]):\n yield q\n else:\n yield jam", - "docstring": "A recursive transformation pipeline" - }, - { - "code": "def write_lines(self, data):\n lines = data.replace('\\r', '').split('\\n')\n for line in lines:\n self.__exchange(line)", - "docstring": "write lines, one by one, separated by \\n to device" - }, - { - "code": "def __serial_transform(self, jam, steps):\n if six.PY2:\n attr = 'next'\n else:\n attr = '__next__'\n pending = len(steps)\n nexts = itertools.cycle(getattr(iter(D.transform(jam)), attr)\n for (name, D) in steps)\n while pending:\n try:\n for next_jam in nexts:\n yield next_jam()\n except StopIteration:\n pending -= 1\n nexts = itertools.cycle(itertools.islice(nexts, pending))", - "docstring": "A serial transformation union" - }, - { - "code": "def build_index_and_mapping(triples):\n ents = bidict()\n rels = bidict()\n ent_id = 0\n rel_id = 0\n collected = []\n for t in triples:\n for e in (t.head, t.tail):\n if e not in ents:\n ents[e] = ent_id\n ent_id += 1\n if t.relation not in rels:\n rels[t.relation] = rel_id\n rel_id += 1\n collected.append(kgedata.TripleIndex(ents[t.head], rels[t.relation], ents[t.tail]))\n return collected, ents, rels", - "docstring": "index all triples into indexes and return their mappings" - }, - { - "code": "async def _async_main(example_coroutine, client, args):\n task = asyncio.ensure_future(client.connect())\n on_connect = asyncio.Future()\n client.on_connect.add_observer(lambda: on_connect.set_result(None))\n done, _ = await asyncio.wait(\n (on_connect, task), return_when=asyncio.FIRST_COMPLETED\n )\n await asyncio.gather(*done)\n try:\n await example_coroutine(client, args)\n except asyncio.CancelledError:\n pass\n finally:\n await client.disconnect()\n await task", - "docstring": "Run the example coroutine." - }, - { - "code": "def ancestors(self):\n ancestors = set([])\n self._depth_ascend(self, ancestors)\n try:\n ancestors.remove(self)\n except KeyError:\n pass\n return list(ancestors)", - "docstring": "Returns a list of the ancestors of this node." 
- }, - { - "code": "def read_chunk(filename, offset=-1, length=-1, escape_data=False):\n try:\n length = int(length)\n offset = int(offset)\n except ValueError:\n return {}\n if not os.path.isfile(filename):\n return {}\n try:\n fstat = os.stat(filename)\n except Exception:\n return {}\n if offset == -1:\n offset = fstat.st_size\n if length == -1:\n length = fstat.st_size - offset\n with open(filename, \"r\") as fp:\n fp.seek(offset)\n try:\n data = fp.read(length)\n except IOError:\n return {}\n if data:\n data = _escape_data(data) if escape_data else data\n return dict(offset=offset, length=len(data), data=data)\n return dict(offset=offset, length=0)", - "docstring": "Read a chunk of a file from an offset upto the length." - }, - { - "code": "def remove_all_properties(self, recursive):\n if self.provider.prop_manager:\n self.provider.prop_manager.remove_properties(\n self.get_ref_url(), self.environ\n )", - "docstring": "Remove all associated dead properties." - }, - { - "code": "def pad(num, n=2, sign=False):\n s = unicode(abs(num))\n if len(s) < n:\n s = '0' * (n - len(s)) + s\n if not sign:\n return s\n if num >= 0:\n return '+' + s\n else:\n return '-' + s", - "docstring": "returns n digit string representation of the num" - }, - { - "code": "def new_source(self, source):\n source_streamlet = None\n if callable(source):\n source_streamlet = SupplierStreamlet(source)\n elif isinstance(source, Generator):\n source_streamlet = GeneratorStreamlet(source)\n else:\n raise RuntimeError(\"Builder's new source has to be either a Generator or a function\")\n self._sources.append(source_streamlet)\n return source_streamlet", - "docstring": "Adds a new source to the computation DAG" - }, - { - "code": "def _move_session_handler(handlers):\n index = 0\n for i, handler in enumerate(handlers):\n if isinstance(handler, SessionHandler):\n index = i\n break\n if index:\n handlers[:index + 1] = [handlers[index]] + handlers[:index]", - "docstring": "Find a SessionHandler instance in the list and move it to the beginning." - }, - { - "code": "def pop_data(self, nbytes):\n last_timestamp = 0\n data = []\n for packet in self.pop(nbytes):\n last_timestamp = packet.timestamp\n data.append(packet.data.data)\n return ''.join(data), last_timestamp", - "docstring": "similar to pop, but returns payload + last timestamp" - }, - { - "code": "def forms_valid(self, form, inlines):\n response = self.form_valid(form)\n for formset in inlines:\n formset.save()\n return response", - "docstring": "If the form and formsets are valid, save the associated models." - }, - { - "code": "def _load_savefile_header(file_h):\n try:\n raw_savefile_header = file_h.read(24)\n except UnicodeDecodeError:\n print(\"\\nMake sure the input file is opened in read binary, 'rb'\\n\")\n raise InvalidEncoding(\"Could not read file; it might not be opened in binary mode.\")\n if raw_savefile_header[:4] in [struct.pack(\">I\", _MAGIC_NUMBER),\n struct.pack(\">I\", _MAGIC_NUMBER_NS)]:\n byte_order = b'big'\n unpacked = struct.unpack('>IhhIIII', raw_savefile_header)\n elif raw_savefile_header[:4] in [struct.pack(\"\": \", \".join(masters_in_quotes)})", - "docstring": "Template slave config file" - }, - { - "code": "def from_spec(spec, kwargs=None):\n baseline = util.get_object(\n obj=spec,\n predefined_objects=tensorforce.core.baselines.baselines,\n kwargs=kwargs\n )\n assert isinstance(baseline, Baseline)\n return baseline", - "docstring": "Creates a baseline from a specification dict." 
- }, - { - "code": "def store_providers(self, provider_data):\n if not hasattr(provider_data, '__iter__'):\n raise OEmbedException('Autodiscovered response not iterable')\n provider_pks = []\n for provider in provider_data:\n if 'endpoint' not in provider or \\\n 'matches' not in provider:\n continue\n resource_type = provider.get('type')\n if resource_type not in RESOURCE_TYPES:\n continue\n stored_provider, created = StoredProvider.objects.get_or_create(\n wildcard_regex=provider['matches']\n )\n if created:\n stored_provider.endpoint_url = relative_to_full( \n provider['endpoint'],\n provider['matches']\n )\n stored_provider.resource_type = resource_type\n stored_provider.save()\n provider_pks.append(stored_provider.pk)\n return StoredProvider.objects.filter(pk__in=provider_pks)", - "docstring": "Iterate over the returned json and try to sort out any new providers" - }, - { - "code": "def getNotesForABF(abfFile):\n parent=getParent(abfFile)\n parent=os.path.basename(parent).replace(\".abf\",\"\")\n expFile=os.path.dirname(abfFile)+\"/experiment.txt\"\n if not os.path.exists(expFile):\n return \"no experiment file\"\n with open(expFile) as f:\n raw=f.readlines()\n for line in raw:\n if line[0]=='~':\n line=line[1:].strip()\n if line.startswith(parent):\n while \"\\t\\t\" in line:\n line=line.replace(\"\\t\\t\",\"\\t\")\n line=line.replace(\"\\t\",\"\\n\")\n return line\n return \"experiment.txt found, but didn't contain %s\"%parent", - "docstring": "given an ABF, find the parent, return that line of experiments.txt" - }, - { - "code": "def _as_dict(self, r):\n d = dict()\n for i, f in enumerate(self._field_names):\n d[f] = r[i] if i < len(r) else None\n return d", - "docstring": "Convert the record to a dictionary using field names as keys." - }, - { - "code": "def fetch_course_organizations(course_key):\n queryset = internal.OrganizationCourse.objects.filter(\n course_id=text_type(course_key),\n active=True\n ).select_related('organization')\n return [serializers.serialize_organization_with_course(organization) for organization in queryset]", - "docstring": "Retrieves the organizations linked to the specified course" - }, - { - "code": "def _get_lookup_spec(identifier):\n if identifier.startswith('+'):\n return hangups.hangouts_pb2.EntityLookupSpec(\n phone=identifier, create_offnetwork_gaia=True\n )\n elif '@' in identifier:\n return hangups.hangouts_pb2.EntityLookupSpec(\n email=identifier, create_offnetwork_gaia=True\n )\n else:\n return hangups.hangouts_pb2.EntityLookupSpec(gaia_id=identifier)", - "docstring": "Return EntityLookupSpec from phone number, email address, or gaia ID." 
- }, - { - "code": "def clear_sent_messages(self, offset=None):\n if offset is None:\n offset = getattr(settings, 'MAILQUEUE_CLEAR_OFFSET', defaults.MAILQUEUE_CLEAR_OFFSET)\n if type(offset) is int:\n offset = datetime.timedelta(hours=offset)\n delete_before = timezone.now() - offset\n self.filter(sent=True, last_attempt__lte=delete_before).delete()", - "docstring": "Deletes sent MailerMessage records" - }, - { - "code": "def _receive(self, msg):\n msg = self._convert(msg)\n if msg is None:\n return\n str_msg = self.verbose and self._msg_to_str(msg)\n if self.verbose and log.is_debug():\n log.debug('Message %s', str_msg)\n if self.pre_routing:\n self.pre_routing.receive(msg)\n receiver, msg = self.routing.receive(msg)\n if receiver:\n receiver.receive(msg)\n if self.verbose:\n log.info('Routed message %s (%s) to %s', str_msg[:128], msg,\n repr(receiver))", - "docstring": "Receive a message from the input source and perhaps raise an Exception." - }, - { - "code": "def export(self, output_path=None, decrypt=False):\n self._assert_valid_stash()\n all_keys = []\n for key in self.list():\n all_keys.append(dict(self.get(key, decrypt=decrypt)))\n if all_keys:\n if output_path:\n with open(output_path, 'w') as output_file:\n output_file.write(json.dumps(all_keys, indent=4))\n return all_keys\n else:\n raise GhostError('There are no keys to export')", - "docstring": "Export all keys in the stash to a list or a file" - }, - { - "code": "def _localize_inputs_recursive_command(self, task_dir, inputs):\n data_dir = os.path.join(task_dir, _DATA_SUBDIR)\n provider_commands = [\n providers_util.build_recursive_localize_command(data_dir, inputs,\n file_provider)\n for file_provider in _SUPPORTED_INPUT_PROVIDERS\n ]\n return '\\n'.join(provider_commands)", - "docstring": "Returns a command that will stage recursive inputs." - }, - { - "code": "def start_tag(self):\n direct_attributes = (attribute.render(self) for attribute in self.render_attributes)\n attributes = ()\n if hasattr(self, '_attributes'):\n attributes = ('{0}=\"{1}\"'.format(key, value)\n for key, value in self.attributes.items() if value)\n rendered_attributes = \" \".join(filter(bool, chain(direct_attributes, attributes)))\n return '<{0}{1}{2}{3}>'.format(self.tag, ' ' if rendered_attributes else '',\n rendered_attributes, ' /' if self.tag_self_closes else \"\")", - "docstring": "Returns the elements HTML start tag" - }, - { - "code": "def uptime():\n if __boottime is not None:\n return time.time() - __boottime\n return {'amiga': _uptime_amiga,\n 'aros12': _uptime_amiga,\n 'beos5': _uptime_beos,\n 'cygwin': _uptime_linux,\n 'darwin': _uptime_osx,\n 'haiku1': _uptime_beos,\n 'linux': _uptime_linux,\n 'linux-armv71': _uptime_linux,\n 'linux2': _uptime_linux,\n 'mac': _uptime_mac,\n 'minix3': _uptime_minix,\n 'riscos': _uptime_riscos,\n 'sunos5': _uptime_solaris,\n 'syllable': _uptime_syllable,\n 'win32': _uptime_windows,\n 'wince': _uptime_windows}.get(sys.platform, _uptime_bsd)() or \\\n _uptime_bsd() or _uptime_plan9() or _uptime_linux() or \\\n _uptime_windows() or _uptime_solaris() or _uptime_beos() or \\\n _uptime_amiga() or _uptime_riscos() or _uptime_posix() or \\\n _uptime_syllable() or _uptime_mac() or _uptime_osx()", - "docstring": "Returns uptime in seconds if even remotely possible, or None if not." 
- }, - { - "code": "def make_param(self, name, raw_uri, disk_size):\n if raw_uri.startswith('https://www.googleapis.com/compute'):\n docker_path = self._parse_image_uri(raw_uri)\n return job_model.PersistentDiskMountParam(\n name, raw_uri, docker_path, disk_size, disk_type=None)\n elif raw_uri.startswith('file://'):\n local_path, docker_path = self._parse_local_mount_uri(raw_uri)\n return job_model.LocalMountParam(name, raw_uri, docker_path, local_path)\n elif raw_uri.startswith('gs://'):\n docker_path = self._parse_gcs_uri(raw_uri)\n return job_model.GCSMountParam(name, raw_uri, docker_path)\n else:\n raise ValueError(\n 'Mount parameter {} must begin with valid prefix.'.format(raw_uri))", - "docstring": "Return a MountParam given a GCS bucket, disk image or local path." - }, - { - "code": "def go_str(value):\n io = StringIO.StringIO()\n io.write('\"')\n for c in value:\n if c in _ESCAPES:\n io.write(_ESCAPES[c])\n elif c in _SIMPLE_CHARS:\n io.write(c)\n else:\n io.write(r'\\x{:02x}'.format(ord(c)))\n io.write('\"')\n return io.getvalue()", - "docstring": "Returns value as a valid Go string literal." - }, - { - "code": "def determineProtocol(fname):\n f=open(fname,'rb')\n raw=f.read(5000)\n f.close()\n protoComment=\"unknown\"\n if b\"SWHLab4[\" in raw:\n protoComment=raw.split(b\"SWHLab4[\")[1].split(b\"]\",1)[0]\n elif b\"SWH[\" in raw:\n protoComment=raw.split(b\"SWH[\")[1].split(b\"]\",1)[0]\n else:\n protoComment=\"?\"\n if not type(protoComment) is str:\n protoComment=protoComment.decode(\"utf-8\")\n return protoComment", - "docstring": "determine the comment cooked in the protocol." - }, - { - "code": "def load(self):\r\n self._open_image()\r\n components, data = image_data(self.image)\r\n texture = self.ctx.texture(\r\n self.image.size,\r\n components,\r\n data,\r\n )\r\n texture.extra = {'meta': self.meta}\r\n if self.meta.mipmap:\r\n texture.build_mipmaps()\r\n self._close_image()\r\n return texture", - "docstring": "Load a 2d texture" - }, - { - "code": "def parse_doc (json_iter):\n global DEBUG\n for meta in json_iter:\n base_idx = 0\n for graf_text in filter_quotes(meta[\"text\"], is_email=False):\n if DEBUG:\n print(\"graf_text:\", graf_text)\n grafs, new_base_idx = parse_graf(meta[\"id\"], graf_text, base_idx)\n base_idx = new_base_idx\n for graf in grafs:\n yield graf", - "docstring": "parse one document to prep for TextRank" - }, - { - "code": "def relation_set_has_contradictions(relations: Set[str]) -> bool:\n has_increases = any(relation in CAUSAL_INCREASE_RELATIONS for relation in relations)\n has_decreases = any(relation in CAUSAL_DECREASE_RELATIONS for relation in relations)\n has_cnc = any(relation == CAUSES_NO_CHANGE for relation in relations)\n return 1 < sum([has_cnc, has_decreases, has_increases])", - "docstring": "Return if the set of BEL relations contains a contradiction." - }, - { - "code": "def line(self):\n line, column = self.source_buffer.decompose_position(self.begin_pos)\n return line", - "docstring": "Returns the line number of the beginning of this range." - }, - { - "code": "def _wait_process_std_out_err(self, name, process):\n proc.stream_process_stdout(process, stdout_log_fn(name))\n process.wait()", - "docstring": "Wait for the termination of a process and log its stdout & stderr" - }, - { - "code": "def check(self, text: str, srctext=None) -> [Match]:\n root = self._get_root(self._url, self._encode(text, srctext))\n return [Match(e.attrib) for e in root if e.tag == 'error']", - "docstring": "Match text against enabled rules." 
- }, - { - "code": "def boottime():\n global __boottime\n if __boottime is None:\n up = uptime()\n if up is None:\n return None\n if __boottime is None:\n _boottime_linux()\n if datetime is None:\n raise RuntimeError('datetime module required.')\n return datetime.fromtimestamp(__boottime or time.time() - up)", - "docstring": "Returns boot time if remotely possible, or None if not." - }, - { - "code": "def augment_with_ngrams(unigrams, unigram_vocab_size, n_buckets, n=2):\n def get_ngrams(n):\n return list(zip(*[unigrams[i:] for i in range(n)]))\n def hash_ngram(ngram):\n bytes_ = array.array('L', ngram).tobytes()\n hash_ = int(hashlib.sha256(bytes_).hexdigest(), 16)\n return unigram_vocab_size + hash_ % n_buckets\n return unigrams + [hash_ngram(ngram) for i in range(2, n + 1) for ngram in get_ngrams(i)]", - "docstring": "Augment unigram features with hashed n-gram features." - }, - { - "code": "def _get_date_str(timestamp, datetimefmt, show_date=False):\n fmt = ''\n if show_date:\n fmt += '\\n'+datetimefmt.get('date', '')+'\\n'\n fmt += datetimefmt.get('time', '')\n return timestamp.astimezone(tz=None).strftime(fmt)", - "docstring": "Convert UTC datetime into user interface string." - }, - { - "code": "def invitations():\n page = request.args.get('page', 1, type=int)\n per_page = request.args.get('per_page', 5, type=int)\n memberships = Membership.query_invitations(current_user, eager=True).all()\n return render_template(\n 'invenio_groups/pending.html',\n memberships=memberships,\n page=page,\n per_page=per_page,\n )", - "docstring": "List all user pending memberships." - }, - { - "code": "def _generate(self):\n candidates = np.array(range(self._n), np.uint32)\n for i in xrange(self._num):\n self._random.shuffle(candidates)\n pattern = candidates[0:self._getW()]\n self._patterns[i] = set(pattern)", - "docstring": "Generates set of random patterns." - }, - { - "code": "def _get_function_ptr(self, name):\r\n func = _make_function_ptr_instance\r\n self._function_ptrs.setdefault(name, func(self, name))\r\n return self._function_ptrs[name]", - "docstring": "Get or create a function pointer of the given name." - }, - { - "code": "def create_switch(apps, schema_editor):\n Switch = apps.get_model('waffle', 'Switch')\n Switch.objects.update_or_create(name=ENTERPRISE_ROLE_BASED_ACCESS_CONTROL_SWITCH, defaults={'active': False})", - "docstring": "Create the `role_based_access_control` switch if it does not already exist." - }, - { - "code": "def Expect(inner_rule, loc=None):\n @llrule(loc, inner_rule.expected)\n def rule(parser):\n result = inner_rule(parser)\n if result is unmatched:\n expected = reduce(list.__add__, [rule.expected(parser) for rule in parser._errrules])\n expected = list(sorted(set(expected)))\n if len(expected) > 1:\n expected = \" or \".join([\", \".join(expected[0:-1]), expected[-1]])\n elif len(expected) == 1:\n expected = expected[0]\n else:\n expected = \"(impossible)\"\n error_tok = parser._tokens[parser._errindex]\n error = diagnostic.Diagnostic(\n \"fatal\", \"unexpected {actual}: expected {expected}\",\n {\"actual\": error_tok.kind, \"expected\": expected},\n error_tok.loc)\n parser.diagnostic_engine.process(error)\n return result\n return rule", - "docstring": "A rule that executes ``inner_rule`` and emits a diagnostic error if it returns None." 
- }, - { - "code": "async def sync_recent_conversations(\n self, sync_recent_conversations_request\n ):\n response = hangouts_pb2.SyncRecentConversationsResponse()\n await self._pb_request('conversations/syncrecentconversations',\n sync_recent_conversations_request,\n response)\n return response", - "docstring": "Return info on recent conversations and their events." - }, - { - "code": "def _process_files(self, record_id, data):\n if self.files:\n assert not self.files.bucket.locked\n self.files.bucket.locked = True\n snapshot = self.files.bucket.snapshot(lock=True)\n data['_files'] = self.files.dumps(bucket=snapshot.id)\n yield data\n db.session.add(RecordsBuckets(\n record_id=record_id, bucket_id=snapshot.id\n ))\n else:\n yield data", - "docstring": "Snapshot bucket and add files in record during first publishing." - }, - { - "code": "def load(self):\r\n self._open_image()\r\n width, height, depth = self.image.size[0], self.image.size[1] // self.layers, self.layers\r\n components, data = image_data(self.image)\r\n texture = self.ctx.texture_array(\r\n (width, height, depth),\r\n components,\r\n data,\r\n )\r\n texture.extra = {'meta': self.meta}\r\n if self.meta.mipmap:\r\n texture.build_mipmaps()\r\n self._close_image()\r\n return texture", - "docstring": "Load a texture array" - }, - { - "code": "def read_and_parse_roles(cl_args):\n roles = dict()\n with open(get_inventory_file(cl_args), 'r') as stream:\n try:\n roles = yaml.load(stream)\n except yaml.YAMLError as exc:\n Log.error(\"Error parsing inventory file: %s\" % exc)\n sys.exit(-1)\n if Role.ZOOKEEPERS not in roles or not roles[Role.ZOOKEEPERS]:\n Log.error(\"Zookeeper servers node defined!\")\n sys.exit(-1)\n if Role.CLUSTER not in roles or not roles[Role.CLUSTER]:\n Log.error(\"Heron cluster nodes defined!\")\n sys.exit(-1)\n roles[Role.MASTERS] = set([roles[Role.CLUSTER][0]])\n roles[Role.SLAVES] = set(roles[Role.CLUSTER])\n roles[Role.ZOOKEEPERS] = set(roles[Role.ZOOKEEPERS])\n roles[Role.CLUSTER] = set(roles[Role.CLUSTER])\n return roles", - "docstring": "read config files to get roles" - }, - { - "code": "def _sm_to_pain(self, *args, **kwargs):\n _logger.info(\"Starting chaos for blockade %s\" % self._blockade_name)\n self._do_blockade_event()\n millisec = random.randint(self._run_min_time, self._run_max_time)\n self._timer = threading.Timer(millisec / 1000.0, self.event_timeout)\n self._timer.start()", - "docstring": "Start the blockade event" - }, - { - "code": "def output_image_link(self, m):\n return self.renderer.image_link(\n m.group('url'), m.group('target'), m.group('alt'))", - "docstring": "Pass through rest role." - }, - { - "code": "def rename(self, from_name, to_name):\n log.info('renaming database from %s to %s' % (from_name, to_name))\n self._run_stmt('alter database %s rename to %s' % (from_name, to_name))", - "docstring": "Renames an existing database." 
- }, - { - "code": "def _nginx_location_spec(port_spec, bridge_ip):\n location_string_spec = \"\\t \\t location / { \\n\"\n for location_setting in ['proxy_http_version 1.1;',\n 'proxy_set_header Upgrade $http_upgrade;',\n 'proxy_set_header Connection \"upgrade\";',\n 'proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;',\n 'proxy_set_header Host $http_host;',\n _nginx_proxy_string(port_spec, bridge_ip)]:\n location_string_spec += \"\\t \\t \\t {} \\n\".format(location_setting)\n location_string_spec += \"\\t \\t } \\n\"\n return location_string_spec", - "docstring": "This will output the nginx location config string for specific port spec" - }, - { - "code": "def ensure_dir_exists(func):\r\n\t\"wrap a function that returns a dir, making sure it exists\"\r\n\t@functools.wraps(func)\r\n\tdef make_if_not_present():\r\n\t\tdir = func()\r\n\t\tif not os.path.isdir(dir):\r\n\t\t\tos.makedirs(dir)\r\n\t\treturn dir\r\n\treturn make_if_not_present", - "docstring": "wrap a function that returns a dir, making sure it exists" - }, - { - "code": "def _initEphemerals(self):\n self._firstComputeCall = True\n self._accuracy = None\n self._protoScores = None\n self._categoryDistances = None\n self._knn = knn_classifier.KNNClassifier(**self.knnParams)\n for x in ('_partitions', '_useAuxiliary', '_doSphering',\n '_scanInfo', '_protoScores'):\n if not hasattr(self, x):\n setattr(self, x, None)", - "docstring": "Initialize attributes that are not saved with the checkpoint." - }, - { - "code": "def update_sent_packet(self, sent_pkt_size_bytes):\n self.update_count(self.SENT_PKT_COUNT)\n self.update_count(self.SENT_PKT_SIZE, incr_by=sent_pkt_size_bytes)", - "docstring": "Update sent packet metrics" - }, - { - "code": "def createsuperuser(self, username='admin', email=None, password=None, site=None):\n r = self.local_renderer\n site = site or self.genv.SITE\n self.set_site_specifics(site)\n options = ['--username=%s' % username]\n if email:\n options.append('--email=%s' % email)\n if password:\n options.append('--password=%s' % password)\n r.env.options_str = ' '.join(options)\n if self.is_local:\n r.env.project_dir = r.env.local_project_dir\n r.genv.SITE = r.genv.SITE or site\n r.run_or_local('export SITE={SITE}; export ROLE={ROLE}; cd {project_dir}; {manage_cmd} {createsuperuser_cmd} {options_str}')", - "docstring": "Runs the Django createsuperuser management command." - }, - { - "code": "def web(connection, host, port):\n from bio2bel.web.application import create_application\n app = create_application(connection=connection)\n app.run(host=host, port=port)", - "docstring": "Run a combine web interface." - }, - { - "code": "def _byteify(data, ignore_dicts=False):\n if isinstance(data, unicode):\n return data.encode(\"utf-8\")\n if isinstance(data, list):\n return [_byteify(item, ignore_dicts=True) for item in data]\n if isinstance(data, dict) and not ignore_dicts:\n return {\n _byteify(key, ignore_dicts=True): _byteify(value, ignore_dicts=True)\n for key, value in data.iteritems()\n }\n return data", - "docstring": "converts unicode to utf-8 when reading in json files" - }, - { - "code": "def trace_to_next_plane(self):\n return list(map(lambda positions, deflections: np.subtract(positions, deflections),\n self.positions, self.deflections))", - "docstring": "Trace the positions to the next plane." 
- }, - { - "code": "def course_discovery_api_client(user, catalog_url):\n if JwtBuilder is None:\n raise NotConnectedToOpenEdX(\n _(\"To get a Catalog API client, this package must be \"\n \"installed in an Open edX environment.\")\n )\n jwt = JwtBuilder.create_jwt_for_user(user)\n return EdxRestApiClient(catalog_url, jwt=jwt)", - "docstring": "Return a Course Discovery API client setup with authentication for the specified user." - }, - { - "code": "def contains_sequence(self, *items):\n if len(items) == 0:\n raise ValueError('one or more args must be given')\n else:\n try:\n for i in xrange(len(self.val) - len(items) + 1):\n for j in xrange(len(items)):\n if self.val[i+j] != items[j]:\n break\n else:\n return self\n except TypeError:\n raise TypeError('val is not iterable')\n self._err('Expected <%s> to contain sequence %s, but did not.' % (self.val, self._fmt_items(items)))", - "docstring": "Asserts that val contains the given sequence of items in order." - }, - { - "code": "def assume_localhost(self):\n if not self.genv.host_string:\n self.genv.host_string = 'localhost'\n self.genv.hosts = ['localhost']\n self.genv.user = getpass.getuser()", - "docstring": "Sets connection parameters to localhost, if not set already." - }, - { - "code": "def descendents(self):\n visited = set([])\n self._depth_descend(self, visited)\n try:\n visited.remove(self)\n except KeyError:\n pass\n return list(visited)", - "docstring": "Returns a list of descendents of this node." - }, - { - "code": "def all_named_colors():\n yield from _TO_COLOR_USER.items()\n for name, color in _TO_COLOR.items():\n if name not in _TO_COLOR_USER:\n yield name, color", - "docstring": "Return an iteration over all name, color pairs in tables" - }, - { - "code": "def cmd_join(opts):\n config = load_config(opts.config)\n b = get_blockade(config, opts)\n b.join()", - "docstring": "Restore full networking between containers" - }, - { - "code": "def above(self, ref):\n if not self._valid_ordering_reference(ref):\n raise ValueError(\n \"%r can only be moved above instances of %r which %s equals %r.\" % (\n self, self.__class__, self.order_with_respect_to,\n self._get_order_with_respect_to()\n )\n )\n if self.order == ref.order:\n return\n if self.order > ref.order:\n o = ref.order\n else:\n o = self.get_ordering_queryset().filter(order__lt=ref.order).aggregate(Max('order')).get('order__max') or 0\n self.to(o)", - "docstring": "Move this object above the referenced object." - }, - { - "code": "def cmd_logs(opts):\n config = load_config(opts.config)\n b = get_blockade(config, opts)\n puts(b.logs(opts.container).decode(encoding='UTF-8'))", - "docstring": "Fetch the logs of a container" - }, - { - "code": "def _echo_setting(key):\n value = getattr(settings, key)\n secho('%s: ' % key, fg='magenta', bold=True, nl=False)\n secho(\n six.text_type(value),\n bold=True,\n fg='white' if isinstance(value, six.text_type) else 'cyan',\n )", - "docstring": "Echo a setting to the CLI." - }, - { - "code": "def _hashCoordinate(coordinate):\n coordinateStr = \",\".join(str(v) for v in coordinate)\n hash = int(int(hashlib.md5(coordinateStr).hexdigest(), 16) % (2 ** 64))\n return hash", - "docstring": "Hash a coordinate to a 64 bit integer." 
- }, - { - "code": "def _run_ext_wsgiutils(app, config, mode):\n from wsgidav.server import ext_wsgiutils_server\n _logger.info(\n \"Running WsgiDAV {} on wsgidav.ext_wsgiutils_server...\".format(__version__)\n )\n _logger.warning(\n \"WARNING: This single threaded server (ext-wsgiutils) is not meant for production.\"\n )\n try:\n ext_wsgiutils_server.serve(config, app)\n except KeyboardInterrupt:\n _logger.warning(\"Caught Ctrl-C, shutting down...\")\n return", - "docstring": "Run WsgiDAV using ext_wsgiutils_server from the wsgidav package." - }, - { - "code": "def popUpItem(self, *args):\n self.Press()\n time.sleep(.5)\n return self._menuItem(self, *args)", - "docstring": "Return the specified item in a pop up menu." - }, - { - "code": "def _process_command(self, command, name=None):\n self._command_counter += 1\n if name is None:\n name = \"CMD{0:03d}\".format(self._command_counter)\n try:\n fd, tmp_ndx = tempfile.mkstemp(suffix='.ndx', prefix='tmp_'+name+'__')\n cmd = [command, '', 'q']\n rc,out,err = self.make_ndx(o=tmp_ndx, input=cmd)\n self.check_output(out, \"No atoms found for selection {command!r}.\".format(**vars()), err=err)\n groups = parse_ndxlist(out)\n last = groups[-1]\n fd, ndx = tempfile.mkstemp(suffix='.ndx', prefix=name+'__')\n name_cmd = [\"keep {0:d}\".format(last['nr']),\n \"name 0 {0!s}\".format(name), 'q']\n rc,out,err = self.make_ndx(n=tmp_ndx, o=ndx, input=name_cmd)\n finally:\n utilities.unlink_gmx(tmp_ndx)\n return name, ndx", - "docstring": "Process ``make_ndx`` command and return name and temp index file." - }, - { - "code": "def reset_lock(self):\n redis_key = self.CELERY_LOCK.format(task_id=self.task_identifier)\n self.celery_self.backend.client.delete(redis_key)", - "docstring": "Removed the lock regardless of timeout." - }, - { - "code": "def __validateExperimentControl(self, control):\n taskList = control.get('tasks', None)\n if taskList is not None:\n taskLabelsList = []\n for task in taskList:\n validateOpfJsonValue(task, \"opfTaskSchema.json\")\n validateOpfJsonValue(task['taskControl'], \"opfTaskControlSchema.json\")\n taskLabel = task['taskLabel']\n assert isinstance(taskLabel, types.StringTypes), \\\n \"taskLabel type: %r\" % type(taskLabel)\n assert len(taskLabel) > 0, \"empty string taskLabel not is allowed\"\n taskLabelsList.append(taskLabel.lower())\n taskLabelDuplicates = filter(lambda x: taskLabelsList.count(x) > 1,\n taskLabelsList)\n assert len(taskLabelDuplicates) == 0, \\\n \"Duplcate task labels are not allowed: %s\" % taskLabelDuplicates\n return", - "docstring": "Validates control dictionary for the experiment context" - }, - { - "code": "async def _seek(self, ctx, *, time: str):\r\n player = self.bot.lavalink.players.get(ctx.guild.id)\r\n if not player.is_playing:\r\n return await ctx.send('Not playing.')\r\n seconds = time_rx.search(time)\r\n if not seconds:\r\n return await ctx.send('You need to specify the amount of seconds to skip!')\r\n seconds = int(seconds.group()) * 1000\r\n if time.startswith('-'):\r\n seconds *= -1\r\n track_time = player.position + seconds\r\n await player.seek(track_time)\r\n await ctx.send(f'Moved track to **{lavalink.Utils.format_time(track_time)}**')", - "docstring": "Seeks to a given position in a track." - }, - { - "code": "def edge(self, id1, id2):\n if id1 in self and \\\n id2 in self and \\\n self[id2] in self[id1].links:\n return self[id1].links.edge(id2)\n return None", - "docstring": "Returns the edge between the nodes with given id1 and id2." 
- }, - { - "code": "def unquote(s):\n if len(s) > 1:\n if s.startswith('\"') and s.endswith('\"'):\n return s[1:-1].replace('\\\\\\\\', '\\\\').replace('\\\\\"', '\"')\n if s.startswith('<') and s.endswith('>'):\n return s[1:-1]\n return s", - "docstring": "Remove quotes from a string." - }, - { - "code": "def log_post(self, url=None, credentials=None, do_verify_certificate=True):\n if url is None:\n url = self.url\n if credentials is None:\n credentials = self.credentials\n if do_verify_certificate is None:\n do_verify_certificate = self.do_verify_certificate\n if credentials and \"base64\" in credentials:\n headers = {\"Content-Type\": \"application/json\", \\\n 'Authorization': 'Basic %s' % credentials[\"base64\"]}\n else:\n headers = {\"Content-Type\": \"application/json\"}\n try:\n request = requests.post(url, headers=headers, \\\n data=self.store.get_json(), verify=do_verify_certificate)\n except httplib.IncompleteRead as e:\n request = e.partial", - "docstring": "Write to a remote host via HTTP POST" - }, - { - "code": "def handle(self, *args, **options):\n LOGGER.info('Starting assigning enterprise roles to users!')\n role = options['role']\n if role == ENTERPRISE_ADMIN_ROLE:\n self._assign_enterprise_role_to_users(self._get_enterprise_admin_users_batch, options)\n elif role == ENTERPRISE_OPERATOR_ROLE:\n self._assign_enterprise_role_to_users(self._get_enterprise_operator_users_batch, options)\n elif role == ENTERPRISE_LEARNER_ROLE:\n self._assign_enterprise_role_to_users(self._get_enterprise_customer_users_batch, options)\n elif role == ENTERPRISE_ENROLLMENT_API_ADMIN_ROLE:\n self._assign_enterprise_role_to_users(self._get_enterprise_enrollment_api_admin_users_batch, options, True)\n elif role == ENTERPRISE_CATALOG_ADMIN_ROLE:\n self._assign_enterprise_role_to_users(self._get_enterprise_catalog_admin_users_batch, options, True)\n else:\n raise CommandError('Please provide a valid role name. Supported roles are {admin} and {learner}'.format(\n admin=ENTERPRISE_ADMIN_ROLE,\n learner=ENTERPRISE_LEARNER_ROLE\n ))\n LOGGER.info('Successfully finished assigning enterprise roles to users!')", - "docstring": "Entry point for managment command execution." - }, - { - "code": "def multi_run_epicom(graphs: Iterable[BELGraph], path: Union[None, str, TextIO]) -> None:\n if isinstance(path, str):\n with open(path, 'w') as file:\n _multi_run_helper_file_wrapper(graphs, file)\n else:\n _multi_run_helper_file_wrapper(graphs, path)", - "docstring": "Run EpiCom analysis on many graphs." - }, - { - "code": "def _kwargs(self):\n return dict(color=self.color, velocity=self.velocity, colors=self.colors)", - "docstring": "Keyword arguments for recreating the Shape from the vertices." - }, - { - "code": "def manage(self, cmd, *args, **kwargs):\n r = self.local_renderer\n environs = kwargs.pop('environs', '').strip()\n if environs:\n environs = ' '.join('export %s=%s;' % tuple(_.split('=')) for _ in environs.split(','))\n environs = ' ' + environs + ' '\n r.env.cmd = cmd\n r.env.SITE = r.genv.SITE or r.genv.default_site\n r.env.args = ' '.join(map(str, args))\n r.env.kwargs = ' '.join(\n ('--%s' % _k if _v in (True, 'True') else '--%s=%s' % (_k, _v))\n for _k, _v in kwargs.items())\n r.env.environs = environs\n if self.is_local:\n r.env.project_dir = r.env.local_project_dir\n r.run_or_local('export SITE={SITE}; export ROLE={ROLE};{environs} cd {project_dir}; {manage_cmd} {cmd} {args} {kwargs}')", - "docstring": "A generic wrapper around Django's manage command." 
- }, - { - "code": "def parse_filter(self, filters):\n for filter_type in filters:\n if filter_type == 'or' or filter_type == 'and':\n conditions = []\n for field in filters[filter_type]:\n if self.is_field_allowed(field):\n conditions.append(self.create_query(self.parse_field(field, filters[filter_type][field])))\n if filter_type == 'or':\n self.model_query = self.model_query.filter(or_(*conditions))\n elif filter_type == 'and':\n self.model_query = self.model_query.filter(and_(*conditions))\n else:\n if self.is_field_allowed(filter_type):\n conditions = self.create_query(self.parse_field(filter_type, filters[filter_type]))\n self.model_query = self.model_query.filter(conditions)\n return self.model_query", - "docstring": "This method process the filters" - }, - { - "code": "def load_targets(self):\n ldap_services = []\n if self.ldap:\n ldap_services = self.search.get_services(ports=[389], up=True)\n self.ldap_strings = [\"ldap://{}\".format(service.address) for service in ldap_services]\n self.services = self.search.get_services(tags=['smb_signing_disabled'])\n self.ips = [str(service.address) for service in self.services]", - "docstring": "load_targets will load the services with smb signing disabled and if ldap is enabled the services with the ldap port open." - }, - { - "code": "def fetch_published(self):\n pid_type = self['_deposit']['pid']['type']\n pid_value = self['_deposit']['pid']['value']\n resolver = Resolver(\n pid_type=pid_type, object_type='rec',\n getter=partial(self.published_record_class.get_record,\n with_deleted=True)\n )\n return resolver.resolve(pid_value)", - "docstring": "Return a tuple with PID and published record." - }, - { - "code": "def _countOverlapIndices(self, i, j):\n if self.bucketMap.has_key(i) and self.bucketMap.has_key(j):\n iRep = self.bucketMap[i]\n jRep = self.bucketMap[j]\n return self._countOverlap(iRep, jRep)\n else:\n raise ValueError(\"Either i or j don't exist\")", - "docstring": "Return the overlap between bucket indices i and j" - }, - { - "code": "def check_unassigned(self, data):\n for char in data:\n for lookup in self.unassigned:\n if lookup(char):\n raise StringprepError(\"Unassigned character: {0!r}\"\n .format(char))\n return data", - "docstring": "Checks for unassigned character codes." - }, - { - "code": "def peripheral_didReadRSSI_error_(self, peripheral, rssi, error):\n logger.debug('peripheral_didReadRSSI_error called')\n if error is not None:\n return\n device = device_list().get(peripheral)\n if device is not None:\n device._rssi_changed(rssi)", - "docstring": "Called when a new RSSI value for the peripheral is available." - }, - { - "code": "def sound_touch(self, call_params):\n path = '/' + self.api_version + '/SoundTouch/'\n method = 'POST'\n return self.request(path, method, call_params)", - "docstring": "REST Add soundtouch audio effects to a Call" - }, - { - "code": "def _create_struct(data, session):\r\n out = Struct()\r\n for name in data.dtype.names:\r\n item = data[name]\r\n if isinstance(item, np.ndarray) and item.dtype.kind == 'O':\r\n item = item.squeeze().tolist()\r\n out[name] = _extract(item, session)\r\n return out", - "docstring": "Create a struct from session data." 
- }, - { - "code": "def disconnect(self):\n for name, connection in self.items():\n if not connection.is_closed():\n connection.close()", - "docstring": "Disconnect from all databases" - }, - { - "code": "def terminal(port=default_port(), baud='9600'):\n testargs = ['nodemcu-uploader', port, baud]\n sys.argv = testargs\n miniterm.main()", - "docstring": "Launch minterm from pyserial" - }, - { - "code": "def move(self, external_index, new_priority):\n index = external_index + (self._capacity - 1)\n return self._move(index, new_priority)", - "docstring": "Change the priority of a leaf node" - }, - { - "code": "def run(self, name, config, builder):\n if not isinstance(name, str):\n raise RuntimeError(\"Name has to be a string type\")\n if not isinstance(config, Config):\n raise RuntimeError(\"config has to be a Config type\")\n if not isinstance(builder, Builder):\n raise RuntimeError(\"builder has to be a Builder type\")\n bldr = TopologyBuilder(name=name)\n builder.build(bldr)\n bldr.set_config(config._api_config)\n bldr.build_and_submit()", - "docstring": "Builds the topology and submits it" - }, - { - "code": "def _characteristics_discovered(self, service):\n self._discovered_services.add(service)\n if self._discovered_services >= set(self._peripheral.services()):\n self._discovered.set()", - "docstring": "Called when GATT characteristics have been discovered." - }, - { - "code": "def tile_overlap(inner, outer, norm=False):\n div = 1.0/inner.volume if norm else 1.0\n return div*(inner.volume - util.Tile.intersection(inner, outer).volume)", - "docstring": "How much of inner is in outer by volume" - }, - { - "code": "def _nbytes(buf):\n if isinstance(buf, memoryview):\n if PY3:\n return buf.nbytes\n else:\n size = buf.itemsize\n for dim in buf.shape:\n size *= dim\n return size\n else:\n return len(buf)", - "docstring": "Return byte-size of a memoryview or buffer." - }, - { - "code": "def contains_point(self, x, y, d=2):\n if self.path != None and len(self.path) > 1 \\\n and self.path.contains(x, y):\n if not self.path.contains(x+d, y) \\\n or not self.path.contains(x, y+d) \\\n or not self.path.contains(x-d, y) \\\n or not self.path.contains(x, y-d) \\\n or not self.path.contains(x+d, y+d) \\\n or not self.path.contains(x-d, y-d) \\\n or not self.path.contains(x+d, y-d) \\\n or not self.path.contains(x-d, y+d):\n return True\n return False", - "docstring": "Returns true when x, y is on the path stroke outline." - }, - { - "code": "def tear_down(self):\n while len(self._temp_directories) > 0:\n directory = self._temp_directories.pop()\n shutil.rmtree(directory, ignore_errors=True)\n while len(self._temp_files) > 0:\n file = self._temp_files.pop()\n try:\n os.remove(file)\n except OSError:\n pass", - "docstring": "Tears down all temp files and directories." - }, - { - "code": "def unscored_nodes_iter(self) -> BaseEntity:\n for node, data in self.graph.nodes(data=True):\n if self.tag not in data:\n yield node", - "docstring": "Iterate over all nodes without a score." 
- }, - { - "code": "def clear(self):\n self._desc = {}\n for key, value in merge.DEFAULT_PROJECT.items():\n if key not in self._HIDDEN:\n self._desc[key] = type(value)()", - "docstring": "Clear description to default values" - }, - { - "code": "def sys_receive(self, cpu, fd, buf, count, rx_bytes):\n if issymbolic(fd):\n logger.info(\"Ask to read from a symbolic file descriptor!!\")\n cpu.PC = cpu.PC - cpu.instruction.size\n raise SymbolicSyscallArgument(cpu, 0)\n if issymbolic(buf):\n logger.info(\"Ask to read to a symbolic buffer\")\n cpu.PC = cpu.PC - cpu.instruction.size\n raise SymbolicSyscallArgument(cpu, 1)\n if issymbolic(count):\n logger.info(\"Ask to read a symbolic number of bytes \")\n cpu.PC = cpu.PC - cpu.instruction.size\n raise SymbolicSyscallArgument(cpu, 2)\n if issymbolic(rx_bytes):\n logger.info(\"Ask to return size to a symbolic address \")\n cpu.PC = cpu.PC - cpu.instruction.size\n raise SymbolicSyscallArgument(cpu, 3)\n return super().sys_receive(cpu, fd, buf, count, rx_bytes)", - "docstring": "Symbolic version of Decree.sys_receive" - }, - { - "code": "def _isCheckpointDir(checkpointDir):\n lastSegment = os.path.split(checkpointDir)[1]\n if lastSegment[0] == '.':\n return False\n if not checkpointDir.endswith(g_defaultCheckpointExtension):\n return False\n if not os.path.isdir(checkpointDir):\n return False\n return True", - "docstring": "Return true iff checkpointDir appears to be a checkpoint directory." - }, - { - "code": "def download(self, bands, download_dir=None, metadata=False):\n super(AWSDownloader, self).validate_bands(bands)\n if download_dir is None:\n download_dir = DOWNLOAD_DIR\n dest_dir = check_create_folder(join(download_dir, self.sceneInfo.name))\n downloaded = []\n for band in bands:\n if band == 'BQA':\n filename = '%s_%s.%s' % (self.sceneInfo.name, band, self.__remote_file_ext)\n else:\n filename = '%s_B%s.%s' % (self.sceneInfo.name, band, self.__remote_file_ext)\n band_url = join(self.base_url, filename)\n downloaded.append(self.fetch(band_url, dest_dir, filename))\n if metadata:\n filename = '%s_MTL.txt' % (self.sceneInfo.name)\n url = join(self.base_url, filename)\n self.fetch(url, dest_dir, filename)\n return downloaded", - "docstring": "Download each specified band and metadata." - }, - { - "code": "def add_model_string(self, model_str, position=1, file_id=None):\n if file_id is None:\n file_id = self.make_unique_id('inlined_input')\n ret_data = self.file_create(File.from_string(model_str, position,\n file_id))\n return ret_data", - "docstring": "Add a kappa model given in a string to the project." 
- }, - { - "code": "def create_annotation_node(self, annotation):\n annotation_node = URIRef(str(annotation.spdx_id))\n type_triple = (annotation_node, RDF.type, self.spdx_namespace.Annotation)\n self.graph.add(type_triple)\n annotator_node = Literal(annotation.annotator.to_value())\n self.graph.add((annotation_node, self.spdx_namespace.annotator, annotator_node))\n annotation_date_node = Literal(annotation.annotation_date_iso_format)\n annotation_triple = (annotation_node, self.spdx_namespace.annotationDate, annotation_date_node)\n self.graph.add(annotation_triple)\n if annotation.has_comment:\n comment_node = Literal(annotation.comment)\n comment_triple = (annotation_node, RDFS.comment, comment_node)\n self.graph.add(comment_triple)\n annotation_type_node = Literal(annotation.annotation_type)\n annotation_type_triple = (annotation_node, self.spdx_namespace.annotationType, annotation_type_node)\n self.graph.add(annotation_type_triple)\n return annotation_node", - "docstring": "Return an annotation node." - }, - { - "code": "def _days_in_month(year, month):\n \"year, month -> number of days in that month in that year.\"\n assert 1 <= month <= 12, month\n if month == 2 and _is_leap(year):\n return 29\n return _DAYS_IN_MONTH[month]", - "docstring": "year, month -> number of days in that month in that year." - }, - { - "code": "def angle(self, x0, y0, x1, y1):\n a = degrees( atan((y1-y0) / (x1-x0+0.00001)) ) + 360\n if x1-x0 < 0: a += 180\n return a", - "docstring": "Calculates the angle between two points." - }, - { - "code": "def fetch_and_filter_tags(self):\n self.all_tags = self.fetcher.get_all_tags()\n self.filtered_tags = self.get_filtered_tags(self.all_tags)\n self.fetch_tags_dates()", - "docstring": "Fetch and filter tags, fetch dates and sort them in time order." - }, - { - "code": "def create_query(self, attr):\n field = attr[0]\n operator = attr[1]\n value = attr[2]\n model = self.model\n if '.' in field:\n field_items = field.split('.')\n field_name = getattr(model, field_items[0], None)\n class_name = field_name.property.mapper.class_\n new_model = getattr(class_name, field_items[1])\n return field_name.has(OPERATORS[operator](new_model, value))\n return OPERATORS[operator](getattr(model, field, None), value)", - "docstring": "Mix all values and make the query" - }, - { - "code": "def _wait_state(self, state, reward, terminal):\n while state == [None] or not state:\n state, terminal, reward = self._execute(dict(key=0))\n return state, terminal, reward", - "docstring": "Wait until there is a state." - }, - { - "code": "def python_value(self, value):\n value = coerce_to_bytes(value)\n obj = HashValue(value)\n obj.field = self\n return obj", - "docstring": "Convert the database value to a pythonic value." - }, - { - "code": "def _initEphemerals(self):\n if hasattr(self, '_sfdr') and self._sfdr:\n self._spatialPoolerOutput = numpy.zeros(self.columnCount,\n dtype=GetNTAReal())\n else:\n self._spatialPoolerOutput = None\n self._fpLogSPInput = None\n self._fpLogSP = None\n self._fpLogSPDense = None\n self.logPathInput = \"\"\n self.logPathOutput = \"\"\n self.logPathOutputDense = \"\"", - "docstring": "Initialize all ephemerals used by derived classes." 
- }, - { - "code": "def can(obj):\n import_needed = False\n for cls, canner in iteritems(can_map):\n if isinstance(cls, string_types):\n import_needed = True\n break\n elif istype(obj, cls):\n return canner(obj)\n if import_needed:\n _import_mapping(can_map, _original_can_map)\n return can(obj)\n return obj", - "docstring": "Prepare an object for pickling." - }, - { - "code": "def _parse_frequencies(self):\n frequencies = OrderedDict([\n ('EXAC', 'Unknown'),\n ('ESP', 'Unknown'),\n ('TGP', 'Unknown')])\n pref_freq = 'Unknown'\n for source in frequencies.keys():\n freq_key = 'AF_' + source\n if freq_key in self.info:\n frequencies[source] = self.info[freq_key]\n if pref_freq == 'Unknown':\n pref_freq = frequencies[source]\n return pref_freq, frequencies", - "docstring": "Parse frequency data in ClinVar VCF" - }, - { - "code": "def customize(func):\n @wraps(func)\n def call_w_context(*args, **kwargs):\n set_context = kwargs.pop('set_context', True)\n if set_context:\n with plotting_context(), axes_style():\n return func(*args, **kwargs)\n else:\n return func(*args, **kwargs)\n return call_w_context", - "docstring": "Decorator to set plotting context and axes style during function call." - }, - { - "code": "def gce_list(service=None, **kwargs):\n resp_list = []\n req = service.list(**kwargs)\n while req is not None:\n resp = req.execute()\n for item in resp.get('items', []):\n resp_list.append(item)\n req = service.list_next(previous_request=req, previous_response=resp)\n return resp_list", - "docstring": "General list function for the GCE service." - }, - { - "code": "def abfIDfromFname(fname):\n fname=os.path.abspath(fname)\n basename=os.path.basename(fname)\n return os.path.splitext(basename)[0]", - "docstring": "given a filename, return the ABFs ID string." - }, - { - "code": "def confirm(self):\n self.email.is_verified = True\n self.email.save()\n signals.email_verified.send(email=self.email, sender=self.__class__)\n logger.info(\"Verified email address: %s\", self.email.email)", - "docstring": "Mark the instance's email as verified." - }, - { - "code": "def __intermediate_htmode(self, radio):\n protocol = radio.pop('protocol')\n channel_width = radio.pop('channel_width')\n if 'htmode' in radio:\n return radio['htmode']\n if protocol == '802.11n':\n return 'HT{0}'.format(channel_width)\n elif protocol == '802.11ac':\n return 'VHT{0}'.format(channel_width)\n return 'NONE'", - "docstring": "only for mac80211 driver" - }, - { - "code": "def zone_transfer(address, dns_name):\n ips = []\n try:\n print_notification(\"Attempting dns zone transfer for {} on {}\".format(dns_name, address))\n z = dns.zone.from_xfr(dns.query.xfr(address, dns_name))\n except dns.exception.FormError:\n print_notification(\"Zone transfer not allowed\")\n return ips\n names = z.nodes.keys()\n print_success(\"Zone transfer successfull for {}, found {} entries\".format(address, len(names)))\n for n in names:\n node = z[n]\n data = node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A)\n if data:\n for item in data.items:\n address = item.address\n ips.append(address)\n return ips", - "docstring": "Tries to perform a zone transfer." - }, - { - "code": "def _add_timeout_handler(self, handler):\n self.timeout_handlers.append(handler)\n if self.event_thread is None:\n return\n self._run_timeout_threads(handler)", - "docstring": "Add a TimeoutHandler to the pool." 
- }, - { - "code": "async def run_tasks(self):\n tasks = self.get_tasks()\n self._gathered_tasks = asyncio.gather(*tasks, loop=self.loop)\n try:\n await self._gathered_tasks\n except CancelledError:\n pass", - "docstring": "Run the tasks attached to the instance" - }, - { - "code": "def _get_service(self):\n if \"service\" in self.document.attrib:\n value = self.document.attrib[\"service\"].lower()\n if value in allowed_service_types:\n self.params[\"service\"] = value\n else:\n raise OWSInvalidParameterValue(\"Service %s is not supported\" % value, value=\"service\")\n else:\n raise OWSMissingParameterValue('Parameter \"service\" is missing', value=\"service\")\n return self.params[\"service\"]", - "docstring": "Check mandatory service name parameter in POST request." - }, - { - "code": "def process_data(self):\n self.visibility = self.data[:, :, 3]\n self.positions = self.data[:, :, :3]\n self.velocities = np.zeros_like(self.positions) + 1000\n for frame_no in range(1, len(self.data) - 1):\n prev = self.data[frame_no - 1]\n next = self.data[frame_no + 1]\n for c in range(self.num_markers):\n if -1 < prev[c, 3] < 100 and -1 < next[c, 3] < 100:\n self.velocities[frame_no, c] = (\n next[c, :3] - prev[c, :3]) / (2 * self.world.dt)\n self.cfms = np.zeros_like(self.visibility) + self.DEFAULT_CFM", - "docstring": "Process data to produce velocity and dropout information." - }, - { - "code": "def download(ctx):\n user, project_name = get_project_or_local(ctx.obj.get('project'))\n try:\n PolyaxonClient().project.download_repo(user, project_name)\n except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:\n Printer.print_error('Could not download code for project `{}`.'.format(project_name))\n Printer.print_error('Error message `{}`.'.format(e))\n sys.exit(1)\n Printer.print_success('Files downloaded.')", - "docstring": "Download code of the current project." - }, - { - "code": "def query_by_user(cls, user, **kwargs):\n return cls._filter(\n cls.query.filter_by(user_id=user.get_id()),\n **kwargs\n )", - "docstring": "Get a user's memberships." - }, - { - "code": "def info(msg):\n _flush()\n sys.stdout.write(msg + '\\n')\n sys.stdout.flush()", - "docstring": "Emit a normal message." - }, - { - "code": "def _start_proc(self):\n assert '_proc' not in dir(self) or self._proc is None\n try:\n self._proc = Popen(shlex.split(self._command), stdin=PIPE, stdout=PIPE, bufsize=0, universal_newlines=True)\n except OSError as e:\n print(e, \"Probably too many cached expressions? 
visitors._cache...\")\n raise Z3NotFoundError\n for cfg in self._init:\n self._send(cfg)", - "docstring": "Spawns z3 solver process" - }, - { - "code": "def addPrivateKey(self, wif):\n try:\n pub = self.publickey_from_wif(wif)\n except Exception:\n raise InvalidWifError(\"Invalid Key format!\")\n if str(pub) in self.store:\n raise KeyAlreadyInStoreException(\"Key already in the store\")\n self.store.add(str(wif), str(pub))", - "docstring": "Add a private key to the wallet database" - }, - { - "code": "def remove_nodes(self, pattern, adict):\n mydict = self._filetree if adict is None else adict\n if isinstance(mydict, dict):\n for nom in mydict.keys():\n if isinstance(mydict[nom], dict):\n matchs = filter_list(mydict[nom], pattern)\n for nom in matchs:\n mydict = self.remove_nodes(pattern, mydict[nom])\n mydict.pop(nom)\n else:\n mydict[nom] = filter_list(mydict[nom], pattern)\n else:\n matchs = set(filter_list(mydict, pattern))\n mydict = set(mydict) - matchs\n return mydict", - "docstring": "Remove the nodes that match the pattern." - }, - { - "code": "def spatial_map(icc, thr, mode='+'):\n return thr_img(icc_img_to_zscore(icc), thr=thr, mode=mode).get_data()", - "docstring": "Return the thresholded z-scored `icc`." - }, - { - "code": "def _add_in_streams(self, bolt):\n if self.inputs is None:\n return\n input_dict = self._sanitize_inputs()\n for global_streamid, gtype in input_dict.items():\n in_stream = bolt.inputs.add()\n in_stream.stream.CopyFrom(self._get_stream_id(global_streamid.component_id,\n global_streamid.stream_id))\n if isinstance(gtype, Grouping.FIELDS):\n in_stream.gtype = gtype.gtype\n in_stream.grouping_fields.CopyFrom(self._get_stream_schema(gtype.fields))\n elif isinstance(gtype, Grouping.CUSTOM):\n in_stream.gtype = gtype.gtype\n in_stream.custom_grouping_object = gtype.python_serialized\n in_stream.type = topology_pb2.CustomGroupingObjectType.Value(\"PYTHON_OBJECT\")\n else:\n in_stream.gtype = gtype", - "docstring": "Adds inputs to a given protobuf Bolt message" - }, - { - "code": "def round_any(x, accuracy, f=np.round):\n if not hasattr(x, 'dtype'):\n x = np.asarray(x)\n return f(x / accuracy) * accuracy", - "docstring": "Round to multiple of any number." - }, - { - "code": "def teardown(file):\n config = read_deployment_config(file)\n manager = DeployManager(config=config, filepath=file)\n exception = None\n try:\n if click.confirm('Would you like to execute pre-delete hooks?', default=True):\n manager.teardown(hooks=True)\n else:\n manager.teardown(hooks=False)\n except Exception as e:\n Printer.print_error('Polyaxon could not teardown the deployment.')\n exception = e\n if exception:\n Printer.print_error('Error message `{}`.'.format(exception))", - "docstring": "Teardown a polyaxon deployment given a config file." - }, - { - "code": "def axes(self):\n return [np.array(self.ode_obj.getAxis1()),\n np.array(self.ode_obj.getAxis2())]", - "docstring": "A list of axes of rotation for this joint." 
- }, - { - "code": "def _assure_dir(self):\n try:\n os.makedirs(self._state_dir)\n except OSError as err:\n if err.errno != errno.EEXIST:\n raise", - "docstring": "Make sure the state directory exists" - }, - { - "code": "def lib2to3_parse(src_txt):\n grammar = pygram.python_grammar_no_print_statement\n drv = driver.Driver(grammar, pytree.convert)\n if src_txt[-1] != '\\n':\n nl = '\\r\\n' if '\\r\\n' in src_txt[:1024] else '\\n'\n src_txt += nl\n try:\n result = drv.parse_string(src_txt, True)\n except ParseError as pe:\n lineno, column = pe.context[1]\n lines = src_txt.splitlines()\n try:\n faulty_line = lines[lineno - 1]\n except IndexError:\n faulty_line = \"\"\n raise ValueError(f\"Cannot parse: {lineno}:{column}: {faulty_line}\") from None\n if isinstance(result, Leaf):\n result = Node(syms.file_input, [result])\n return result", - "docstring": "Given a string with source, return the lib2to3 Node." - }, - { - "code": "def swap_environment_cnames(self, from_env_name, to_env_name):\n self.ebs.swap_environment_cnames(source_environment_name=from_env_name,\n destination_environment_name=to_env_name)", - "docstring": "Swaps cnames for an environment" - }, - { - "code": "def sample_minibatch(self, batch_size):\n pool_size = len(self)\n if pool_size == 0:\n return []\n delta_p = self._memory[0] / batch_size\n chosen_idx = []\n if abs(self._memory[0]) < util.epsilon:\n chosen_idx = np.random.randint(self._capacity - 1, self._capacity - 1 + len(self), size=batch_size).tolist()\n else:\n for i in xrange(batch_size):\n lower = max(i * delta_p, 0)\n upper = min((i + 1) * delta_p, self._memory[0])\n p = random.uniform(lower, upper)\n chosen_idx.append(self._sample_with_priority(p))\n return [(i, self._memory[i]) for i in chosen_idx]", - "docstring": "Sample minibatch of size batch_size." - }, - { - "code": "def _sm_cleanup(self, *args, **kwargs):\n if self._done_notification_func is not None:\n self._done_notification_func()\n self._timer.cancel()", - "docstring": "Delete all state associated with the chaos session" - }, - { - "code": "def _filter(cls, query, state=MembershipState.ACTIVE, eager=None):\n query = query.filter_by(state=state)\n eager = eager or []\n for field in eager:\n query = query.options(joinedload(field))\n return query", - "docstring": "Filter a query result." - }, - { - "code": "def from_json_list(cls, api_client, data):\n return [cls.from_json(api_client, item) for item in data]", - "docstring": "Convert a list of JSON values to a list of models" - }, - { - "code": "def modify_data(data):\n with tempfile.NamedTemporaryFile('w') as f:\n for entry in data:\n f.write(json.dumps(entry.to_dict(\n include_meta=True),\n default=datetime_handler))\n f.write('\\n')\n f.flush()\n print_success(\"Starting editor\")\n subprocess.call(['nano', '-', f.name])\n with open(f.name, 'r') as f:\n return f.readlines()", - "docstring": "Creates a tempfile and starts the given editor, returns the data afterwards." 
- }, - { - "code": "def dropbox_fileupload(dropbox, request):\n attachment = request.POST['attachment']\n attached = dropbox.add_attachment(attachment)\n return dict(\n files=[dict(\n name=attached,\n type=attachment.type,\n )]\n )", - "docstring": "accepts a single file upload and adds it to the dropbox as attachment" - }, - { - "code": "def pydevd(context):\n global pdevd_not_available\n if pdevd_not_available:\n return ''\n try:\n import pydevd\n except ImportError:\n pdevd_not_available = True\n return ''\n render = lambda s: template.Template(s).render(context)\n availables = get_variables(context)\n for var in availables:\n locals()[var] = context[var]\n try:\n pydevd.settrace()\n except socket.error:\n pdevd_not_available = True\n return ''", - "docstring": "Start a pydev settrace" - }, - { - "code": "def package_info(cls, package):\n if package not in cls.package_info_cache:\n package_json_url = 'https://pypi.python.org/pypi/%s/json' % package\n try:\n logging.getLogger('requests').setLevel(logging.WARN)\n response = requests.get(package_json_url)\n response.raise_for_status()\n cls.package_info_cache[package] = simplejson.loads(response.text)\n except Exception as e:\n log.debug('Could not get package info from %s: %s', package_json_url, e)\n cls.package_info_cache[package] = None\n return cls.package_info_cache[package]", - "docstring": "All package info for given package" - }, - { - "code": "def file(file_object, start_on=None, ignore=(), use_short=True, **queries):\n return string(file_object.read(), start_on=start_on, ignore=ignore, use_short=use_short, **queries)", - "docstring": "Returns a blox template from a file stream object" - }, - { - "code": "def spec_formatter(cls, spec):\n \" Formats the elements of an argument set appropriately\"\n return type(spec)((k, str(v)) for (k,v) in spec.items())", - "docstring": "Formats the elements of an argument set appropriately" - }, - { - "code": "def col_transform(self, col, digits):\n if col is None or float(col) < 0.0:\n return None\n else:\n col = self.number_to_base(int(col), self.base, digits)\n if len(col) == digits:\n return col\n else:\n return [0 for _ in range(digits - len(col))] + col", - "docstring": "The lambda body to transform the column values" - }, - { - "code": "def centralManager_didDisconnectPeripheral_error_(self, manager, peripheral, error):\n logger.debug('centralManager_didDisconnectPeripheral called')\n device = device_list().get(peripheral)\n if device is not None:\n device._set_disconnected()\n device_list().remove(peripheral)", - "docstring": "Called when a device is disconnected." - }, - { - "code": "def _sm_start(self, *args, **kwargs):\n millisec = random.randint(self._start_min_delay, self._start_max_delay)\n self._timer = threading.Timer(millisec / 1000.0, self.event_timeout)\n self._timer.start()", - "docstring": "Start the timer waiting for pain" - }, - { - "code": "def optionhelp(self, indent=0, maxindent=25, width=79):\n def makelabels(option):\n labels = '%*s--%s' % (indent, ' ', option.name)\n if option.abbreviation:\n labels += ', -' + option.abbreviation\n return labels + ': '\n docs = []\n helpindent = _autoindent([makelabels(o) for o in self.options.values()], indent, maxindent)\n for name in self.option_order:\n option = self.options[name]\n labels = makelabels(option)\n helpstring = \"%s(%s). 
%s\" % (option.formatname, option.strvalue, option.docs)\n wrapped = self._wrap_labelled(labels, helpstring, helpindent, width)\n docs.extend(wrapped)\n return '\\n'.join(docs)", - "docstring": "Return user friendly help on program options." - }, - { - "code": "def clean_course(self):\n course_id = self.cleaned_data[self.Fields.COURSE].strip()\n if not course_id:\n return None\n try:\n client = EnrollmentApiClient()\n return client.get_course_details(course_id)\n except (HttpClientError, HttpServerError):\n raise ValidationError(ValidationMessages.INVALID_COURSE_ID.format(course_id=course_id))", - "docstring": "Verify course ID and retrieve course details." - }, - { - "code": "def trim_display_field(self, value, max_length):\n if not value:\n return ''\n if len(value) > max_length:\n return value[:max_length - 3] + '...'\n return value", - "docstring": "Return a value for display; if longer than max length, use ellipsis." - }, - { - "code": "def printdir(self):\n print(\"%-46s %19s %12s\" % (\"File Name\", \"Modified \", \"Size\"))\n for rarinfo in self.filelist:\n date = \"%d-%02d-%02d %02d:%02d:%02d\" % rarinfo.date_time[:6]\n print(\"%-46s %s %12d\" % (\n rarinfo.filename, date, rarinfo.file_size))", - "docstring": "Print a table of contents for the RAR file." - }, - { - "code": "async def disconnect(self):\r\n if not self.is_connected:\r\n return\r\n await self.stop()\r\n ws = self._lavalink.bot._connection._get_websocket(int(self.guild_id))\r\n await ws.voice_state(self.guild_id, None)", - "docstring": "Disconnects from the voice channel, if any." - }, - { - "code": "def _cumulative_returns_less_costs(returns, costs):\n if costs is None:\n return ep.cum_returns(returns)\n return ep.cum_returns(returns - costs)", - "docstring": "Compute cumulative returns, less costs." - }, - { - "code": "def checkSerial(self):\n for item in self.rxSerial(self._TUN._tun.mtu):\n try:\n self._TUN._tun.write(item)\n except pytun.Error as error:\n print(\"pytun error writing: {0}\".format(item))\n print(error)", - "docstring": "Check the serial port for data to write to the TUN adapter." - }, - { - "code": "def install_required(self, type=None, service=None, list_only=0, **kwargs):\n r = self.local_renderer\n list_only = int(list_only)\n type = (type or '').lower().strip()\n assert not type or type in PACKAGE_TYPES, 'Unknown package type: %s' % (type,)\n lst = []\n if type:\n types = [type]\n else:\n types = PACKAGE_TYPES\n for _type in types:\n if _type == SYSTEM:\n content = '\\n'.join(self.list_required(type=_type, service=service))\n if list_only:\n lst.extend(_ for _ in content.split('\\n') if _.strip())\n if self.verbose:\n print('content:', content)\n break\n fd, fn = tempfile.mkstemp()\n fout = open(fn, 'w')\n fout.write(content)\n fout.close()\n self.install_custom(fn=fn)\n else:\n raise NotImplementedError\n return lst", - "docstring": "Installs system packages listed as required by services this host uses." 
- }, - { - "code": "def match(self, record):\n for field, meta in self.filterDict.iteritems():\n index = meta['index']\n categories = meta['categories']\n for category in categories:\n if not record:\n continue\n if record[index].find(category) != -1:\n return True\n return False", - "docstring": "Returns True if the record matches any of the provided filters" - }, - { - "code": "def argparser(self):\n core_parser = self.core_parser\n core_parser.add_argument('-r', '--range', type=str, help=\"The range to search for use\")\n return core_parser", - "docstring": "Argparser option with search functionality specific for ranges." - }, - { - "code": "def replies(self):\n fs_reply_path = join(self.fs_replies_path, 'message_001.txt')\n if exists(fs_reply_path):\n return [load(open(fs_reply_path, 'r'))]\n else:\n return []", - "docstring": "returns a list of strings" - }, - { - "code": "def find_libname(self, name):\n names = [\"{}.lib\", \"lib{}.lib\", \"{}lib.lib\"]\n names = [n.format(name) for n in names]\n dirs = self.get_library_dirs()\n for d in dirs:\n for n in names:\n if exists(join(d, n)):\n return n[:-4]\n msg = \"Could not find the {} library.\".format(name)\n raise ValueError(msg)", - "docstring": "Try to infer the correct library name." - }, - { - "code": "def dask_chroms(data, samples):\n h5s = [os.path.join(data.dirs.across, s.name+\".tmp.h5\") for s in samples]\n handles = [h5py.File(i) for i in h5s]\n dsets = [i['/ichrom'] for i in handles]\n arrays = [da.from_array(dset, chunks=(10000, 3)) for dset in dsets]\n stack = da.stack(arrays, axis=2)\n maxchrom = da.max(stack, axis=2)[:, 0]\n maxpos = da.max(stack, axis=2)[:, 2]\n mask = stack == 0\n stack[mask] = 9223372036854775807\n minpos = da.min(stack, axis=2)[:, 1]\n final = da.stack([maxchrom, minpos, maxpos], axis=1)\n final.to_hdf5(data.clust_database, \"/chroms\")\n _ = [i.close() for i in handles]", - "docstring": "A dask relay function to fill chroms for all samples" - }, - { - "code": "def p_file_lic_conc(self, f_term, predicate):\n try:\n for _, _, licenses in self.graph.triples((f_term, predicate, None)):\n if (licenses, RDF.type, self.spdx_namespace['ConjunctiveLicenseSet']) in self.graph:\n lics = self.handle_conjunctive_list(licenses)\n self.builder.set_concluded_license(self.doc, lics)\n elif (licenses, RDF.type, self.spdx_namespace['DisjunctiveLicenseSet']) in self.graph:\n lics = self.handle_disjunctive_list(licenses)\n self.builder.set_concluded_license(self.doc, lics)\n else:\n try:\n lics = self.handle_lics(licenses)\n self.builder.set_concluded_license(self.doc, lics)\n except SPDXValueError:\n self.value_error('FILE_SINGLE_LICS', licenses)\n except CardinalityError:\n self.more_than_one_error('file {0}'.format(predicate))", - "docstring": "Sets file licenses concluded." 
- }, - { - "code": "def cmd_daemon(opts):\n if opts.data_dir is None:\n raise BlockadeError(\"You must supply a data directory for the daemon\")\n rest.start(data_dir=opts.data_dir, port=opts.port, debug=opts.debug,\n host_exec=get_host_exec())", - "docstring": "Start the Blockade REST API" - }, - { - "code": "def _make(cls, iterable, new=tuple.__new__, len=len):\n 'Make a new Match object from a sequence or iterable'\n result = new(cls, iterable)\n if len(result) != 3:\n raise TypeError('Expected 3 arguments, got %d' % len(result))\n return result", - "docstring": "Make a new Match object from a sequence or iterable" - }, - { - "code": "def add_user_jobs(session, job_ids):\n jobs_data = {\n 'jobs[]': job_ids\n }\n response = make_post_request(session, 'self/jobs', json_data=jobs_data)\n json_data = response.json()\n if response.status_code == 200:\n return json_data['status']\n else:\n raise UserJobsNotAddedException(\n message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Add a list of jobs to the currently authenticated user" - }, - { - "code": "def join_phonemes(*args):\n if len(args) == 1:\n args = args[0]\n if len(args) == 2:\n args += (CODAS[0],)\n try:\n onset, nucleus, coda = args\n except ValueError:\n raise TypeError('join_phonemes() takes at most 3 arguments')\n offset = (\n (ONSETS.index(onset) * NUM_NUCLEUSES + NUCLEUSES.index(nucleus)) *\n NUM_CODAS + CODAS.index(coda)\n )\n return unichr(FIRST_HANGUL_OFFSET + offset)", - "docstring": "Joins a Hangul letter from Korean phonemes." - }, - { - "code": "def memberness(context):\n if context:\n texts = context.xpath('.//*[local-name()=\"explicitMember\"]/text()').extract()\n text = str(texts).lower()\n if len(texts) > 1:\n return 2\n elif 'country' in text:\n return 2\n elif 'member' not in text:\n return 0\n elif 'successor' in text:\n return 1\n elif 'parent' in text:\n return 2\n return 3", - "docstring": "The likelihood that the context is a \"member\"." - }, - { - "code": "def getTotaln(self):\n n = sum([field.n for field in self.fields])\n return n", - "docstring": "Returns the cumulative n for all the fields in the dataset" - }, - { - "code": "def postprocess(trun):\n plog = []\n plog.append((\"trun\", process_trun(trun)))\n for tsuite in trun[\"testsuites\"]:\n plog.append((\"tsuite\", process_tsuite(tsuite)))\n for tcase in tsuite[\"testcases\"]:\n plog.append((\"tcase\", process_tcase(tcase)))\n for task, success in plog:\n if not success:\n cij.err(\"rprtr::postprocess: FAILED for %r\" % task)\n return sum((success for task, success in plog))", - "docstring": "Perform postprocessing of the given test run" - }, - { - "code": "def strip_ip(packet):\n if not isinstance(packet, IP):\n packet = IP(packet)\n payload = packet.payload\n return payload", - "docstring": "Remove the IP packet layer, yielding the transport layer." 
- }, - { - "code": "def contains(x):\n if isinstance(x, str):\n x = canonical_name(x)\n return x in _TO_COLOR_USER or x in _TO_COLOR\n else:\n x = tuple(x)\n return x in _TO_NAME_USER or x in _TO_NAME", - "docstring": "Return true if this string or integer tuple appears in tables" - }, - { - "code": "def _regex_replacement(self, target, replacement):\n match = re.compile(target)\n self.data = match.sub(replacement, self.data)", - "docstring": "Regex substitute target with replacement" - }, - { - "code": "def reset_counter(self):\n self._cnt_retries = 0\n for i in self._url_counter:\n self._url_counter[i] = 0", - "docstring": "reset the failed connection counters" - }, - { - "code": "def combine(self):\n self.gbuffer.color_attachments[0].use(location=0)\n self.combine_shader[\"diffuse_buffer\"].value = 0\n self.lightbuffer.color_attachments[0].use(location=1)\n self.combine_shader[\"light_buffer\"].value = 1\n self.quad.render(self.combine_shader)", - "docstring": "Combine diffuse and light buffer" - }, - { - "code": "def json_iter (path):\n with open(path, 'r') as f:\n for line in f.readlines():\n yield json.loads(line)", - "docstring": "iterator for JSON-per-line in a file pattern" - }, - { - "code": "def tex_parse(string):\n\tstring = string.replace('{', '').replace('}', '')\n\tdef tex_replace(match):\n\t\treturn \\\n\t\t\tsub(r'\\^(\\w)', r'\\1',\n\t\t\tsub(r'\\^\\{(.*?)\\}', r'\\1',\n\t\t\tsub(r'\\_(\\w)', r'\\1',\n\t\t\tsub(r'\\_\\{(.*?)\\}', r'\\1',\n\t\t\tsub(r'\\\\(' + GREEK_LETTERS + ')', r'&\\1;', match.group(1))))))\n\treturn mark_safe(sub(r'\\$([^\\$]*)\\$', tex_replace, escape(string)))", - "docstring": "Renders some basic TeX math to HTML." - }, - { - "code": "def update_throttle_scope(self):\n self.scope = SERVICE_USER_SCOPE\n self.rate = self.get_rate()\n self.num_requests, self.duration = self.parse_rate(self.rate)", - "docstring": "Update throttle scope so that service user throttle rates are applied." - }, - { - "code": "def angle(x0, y0, x1, y1):\r\n return degrees(atan2(y1-y0, x1-x0))", - "docstring": "Returns the angle between two points." - }, - { - "code": "def readattr(path, name):\n try:\n f = open(USB_SYS_PREFIX + path + \"/\" + name)\n return f.readline().rstrip(\"\\n\")\n except IOError:\n return None", - "docstring": "Read attribute from sysfs and return as string" - }, - { - "code": "def json_get_data(filename):\n with open(filename) as fp:\n json_data = json.load(fp)\n return json_data\n return False", - "docstring": "Get data from json file" - }, - { - "code": "def _on_event(self, conv_event):\n conv = self._conv_list.get(conv_event.conversation_id)\n user = conv.get_user(conv_event.user_id)\n show_notification = all((\n isinstance(conv_event, hangups.ChatMessageEvent),\n not user.is_self,\n not conv.is_quiet,\n ))\n if show_notification:\n self.add_conversation_tab(conv_event.conversation_id)\n if self._discreet_notifications:\n notification = DISCREET_NOTIFICATION\n else:\n notification = notifier.Notification(\n user.full_name, get_conv_name(conv), conv_event.text\n )\n self._notifier.send(notification)", - "docstring": "Open conversation tab for new messages & pass events to notifier." - }, - { - "code": "def models_preparing(app):\n def wrapper(resource, parent):\n if isinstance(resource, DeclarativeMeta):\n resource = ListResource(resource)\n if not getattr(resource, '__parent__', None):\n resource.__parent__ = parent\n return resource\n resources_preparing_factory(app, wrapper)", - "docstring": "Wrap all sqlalchemy model in settings." 
- }, - { - "code": "def paths_from_env(prefix=None, names=None):\n def expand_path(path):\n return os.path.abspath(os.path.expanduser(os.path.expandvars(path)))\n if prefix is None:\n prefix = \"CIJ\"\n if names is None:\n names = [\n \"ROOT\", \"ENVS\", \"TESTPLANS\", \"TESTCASES\", \"TESTSUITES\", \"MODULES\",\n \"HOOKS\", \"TEMPLATES\"\n ]\n conf = {v: os.environ.get(\"_\".join([prefix, v])) for v in names}\n for env in (e for e in conf.keys() if e[:len(prefix)] in names and conf[e]):\n conf[env] = expand_path(conf[env])\n if not os.path.exists(conf[env]):\n err(\"%s_%s: %r, does not exist\" % (prefix, env, conf[env]))\n return conf", - "docstring": "Construct dict of paths from environment variables" - }, - { - "code": "def locked_context(self, key=None, default=dict):\n keys = ['policy']\n if key is not None:\n keys.append(key)\n with self._executor.locked_context('.'.join(keys), default) as policy_context:\n yield policy_context", - "docstring": "Policy shared context dictionary" - }, - { - "code": "def acquire_lock(func):\n @wraps(func)\n def wrapper(self, *args, **kwargs):\n with self.locker as r:\n acquired, code, _ = r\n if acquired:\n try:\n r = func(self, *args, **kwargs)\n except Exception as err:\n e = str(err)\n else:\n e = None\n else:\n warnings.warn(\"code %s. Unable to aquire the lock when calling '%s'. You may try again!\"%(code,func.__name__) )\n e = None\n r = None\n if e is not None:\n traceback.print_stack()\n raise Exception(e)\n return r\n return wrapper", - "docstring": "Decorate methods when locking repository is required." - }, - { - "code": "def _generateFindR(self, **kwargs):\n for needle in self._generateChildrenR():\n if needle._match(**kwargs):\n yield needle", - "docstring": "Generator which yields matches on AXChildren and their children." - }, - { - "code": "def min_item(self):\n if self.is_empty():\n raise ValueError(\"Tree is empty\")\n node = self._root\n while node.left is not None:\n node = node.left\n return node.key, node.value", - "docstring": "Get item with min key of tree, raises ValueError if tree is empty." - }, - { - "code": "def show_help():\n print(\n)\n for cmd in [write_main, check_main, list_main]:\n print(cmd.__doc__.lstrip(\"\\n\"))", - "docstring": "Print the help string for the edx_lint command." - }, - { - "code": "def _send(self, stanza):\n self.fix_out_stanza(stanza)\n element = stanza.as_xml()\n self._write_element(element)", - "docstring": "Same as `send` but assume `lock` is acquired." - }, - { - "code": "def format_docstring(*args, **kwargs):\n def decorator(func):\n func.__doc__ = getdoc(func).format(*args, **kwargs)\n return func\n return decorator", - "docstring": "Decorator for clean docstring formatting" - }, - { - "code": "def save_photon_hdf5(self, identity=None, overwrite=True, path=None):\n filepath = self.filepath\n if path is not None:\n filepath = Path(path, filepath.name)\n self.merge_da()\n data = self._make_photon_hdf5(identity=identity)\n phc.hdf5.save_photon_hdf5(data, h5_fname=str(filepath),\n overwrite=overwrite)", - "docstring": "Create a smFRET Photon-HDF5 file with current timestamps." 
- }, - { - "code": "def md_dimension_info(name, node):\n def _get_value(child_name):\n return getattr(node.find(child_name), 'text', None)\n resolution = _get_value('resolution')\n defaultValue = node.find(\"defaultValue\")\n strategy = defaultValue.find(\"strategy\") if defaultValue is not None else None\n strategy = strategy.text if strategy is not None else None\n return DimensionInfo(\n name,\n _get_value('enabled') == 'true',\n _get_value('presentation'),\n int(resolution) if resolution else None,\n _get_value('units'),\n _get_value('unitSymbol'),\n strategy,\n _get_value('attribute'),\n _get_value('endAttribute'),\n _get_value('referenceValue'),\n _get_value('nearestMatchEnabled')\n )", - "docstring": "Extract metadata Dimension Info from an xml node" - }, - { - "code": "def enrich_reactions(graph: BELGraph):\n nodes = list(get_nodes_by_function(graph, REACTION))\n for u in nodes:\n for v in u.reactants:\n graph.add_has_reactant(u, v)\n for v in u.products:\n graph.add_has_product(u, v)", - "docstring": "Adds all of the reactants and products of reactions to the graph." - }, - { - "code": "def handle(self, *args, **kwargs):\n cutoff = timezone.now()\n cutoff -= app_settings.CONFIRMATION_EXPIRATION\n cutoff -= app_settings.CONFIRMATION_SAVE_PERIOD\n queryset = models.EmailConfirmation.objects.filter(\n created_at__lte=cutoff\n )\n count = queryset.count()\n queryset.delete()\n if count:\n self.stdout.write(\n self.style.SUCCESS(\n \"Removed {count} old email confirmation(s)\".format(\n count=count\n )\n )\n )\n else:\n self.stdout.write(\"No email confirmations to remove.\")", - "docstring": "Handle execution of the command." - }, - { - "code": "def _deactivate(self):\n self.cache.remove_fetcher(self)\n if self.active:\n self._deactivated()", - "docstring": "Remove the fetcher from cache and mark it not active." - }, - { - "code": "def writetofastq(data, dsort, read):\n if read == 1:\n rrr = \"R1\"\n else:\n rrr = \"R2\"\n for sname in dsort:\n handle = os.path.join(data.dirs.fastqs, \n \"{}_{}_.fastq\".format(sname, rrr))\n with open(handle, 'a') as out:\n out.write(\"\".join(dsort[sname]))", - "docstring": "Writes sorted data 'dsort dict' to a tmp files" - }, - { - "code": "def _normalised_python(self):\n dx = (self.screen.width / float(len(self.points)))\n oy = (self.screen.height)\n for x, point in enumerate(self.points):\n y = (point - self.minimum) * 4.0 / self.extents * self.size.y\n yield Point((\n dx * x,\n min(oy, oy - y),\n ))", - "docstring": "Normalised data points using pure Python." - }, - { - "code": "def to_timezone(self, dt):\n if timezone.is_aware(dt):\n return dt.astimezone(self.timezone)\n else:\n return timezone.make_aware(dt, self.timezone)", - "docstring": "Converts a datetime to the timezone of this Schedule." - }, - { - "code": "def _fill(self, size):\n try:\n for i in range(size):\n self.buffer.append(self.source.next())\n except StopIteration:\n self.buffer.append((EndOfFile, EndOfFile))\n self.len = len(self.buffer)", - "docstring": "fills the internal buffer from the source iterator" - }, - { - "code": "def with_logger(cls):\n attr_name = '_logger'\n cls_name = cls.__qualname__\n module = cls.__module__\n if module is not None:\n cls_name = module + '.' + cls_name\n else:\n raise AssertionError\n setattr(cls, attr_name, logging.getLogger(cls_name))\n return cls", - "docstring": "Class decorator to add a logger to a class." 
- }, - { - "code": "def tsuite_enter(trun, tsuite):\n if trun[\"conf\"][\"VERBOSE\"]:\n cij.emph(\"rnr:tsuite:enter { name: %r }\" % tsuite[\"name\"])\n rcode = 0\n for hook in tsuite[\"hooks\"][\"enter\"]:\n rcode = script_run(trun, hook)\n if rcode:\n break\n if trun[\"conf\"][\"VERBOSE\"]:\n cij.emph(\"rnr:tsuite:enter { rcode: %r } \" % rcode, rcode)\n return rcode", - "docstring": "Triggers when entering the given testsuite" - }, - { - "code": "def group(ctx, project, group):\n ctx.obj = ctx.obj or {}\n ctx.obj['project'] = project\n ctx.obj['group'] = group", - "docstring": "Commands for experiment groups." - }, - { - "code": "def writeToCheckpoint(self, checkpointDir):\n proto = self.getSchema().new_message()\n self.write(proto)\n checkpointPath = self._getModelCheckpointFilePath(checkpointDir)\n if os.path.exists(checkpointDir):\n if not os.path.isdir(checkpointDir):\n raise Exception((\"Existing filesystem entry <%s> is not a model\"\n \" checkpoint -- refusing to delete (not a directory)\") \\\n % checkpointDir)\n if not os.path.isfile(checkpointPath):\n raise Exception((\"Existing filesystem entry <%s> is not a model\"\n \" checkpoint -- refusing to delete\"\\\n \" (%s missing or not a file)\") % \\\n (checkpointDir, checkpointPath))\n shutil.rmtree(checkpointDir)\n self.__makeDirectoryFromAbsolutePath(checkpointDir)\n with open(checkpointPath, 'wb') as f:\n proto.write(f)", - "docstring": "Serializes model using capnproto and writes data to ``checkpointDir``" - }, - { - "code": "def fetch_by_name(self, name):\n service = self.name_index.get(name)\n if not service:\n raise ServiceNotFound\n return Service(service)", - "docstring": "Get service for given ``name`` from memory storage." - }, - { - "code": "def tool_factory(clsname, name, driver, base=GromacsCommand):\n clsdict = {\n 'command_name': name,\n 'driver': driver,\n '__doc__': property(base._get_gmx_docs)\n }\n return type(clsname, (base,), clsdict)", - "docstring": "Factory for GromacsCommand derived types." - }, - { - "code": "def commonprefix(m):\n \"Given a list of pathnames, returns the longest common leading component\"\n if not m: return ''\n s1 = min(m)\n s2 = max(m)\n for i, c in enumerate(s1):\n if c != s2[i]:\n return s1[:i]\n return s1", - "docstring": "Given a list of pathnames, returns the longest common leading component" - }, - { - "code": "def service_list(service=None, key_name=None, **kwargs):\n resp_list = []\n req = service.list(**kwargs)\n while req is not None:\n resp = req.execute()\n if key_name and key_name in resp:\n resp_list.extend(resp[key_name])\n else:\n resp_list.append(resp)\n if hasattr(service, 'list_next'):\n req = service.list_next(previous_request=req,\n previous_response=resp)\n else:\n req = None\n return resp_list", - "docstring": "General list function for Google APIs." 
- }, - { - "code": "def setup(self, context):\n myindex = context.get_partition_index()\n self._files_to_consume = self._files[myindex::context.get_num_partitions()]\n self.logger.info(\"TextFileSpout files to consume %s\" % self._files_to_consume)\n self._lines_to_consume = self._get_next_lines()\n self._emit_count = 0", - "docstring": "Implements TextFile Generator's setup method" - }, - { - "code": "def query_by_group(cls, group_or_id, with_invitations=False, **kwargs):\n if isinstance(group_or_id, Group):\n id_group = group_or_id.id\n else:\n id_group = group_or_id\n if not with_invitations:\n return cls._filter(\n cls.query.filter_by(id_group=id_group),\n **kwargs\n )\n else:\n return cls.query.filter(\n Membership.id_group == id_group,\n db.or_(\n Membership.state == MembershipState.PENDING_USER,\n Membership.state == MembershipState.ACTIVE\n )\n )", - "docstring": "Get a group's members." - }, - { - "code": "def rendering_finished(self, size, frame, cairo_ctx):\n surface = cairo_ctx.get_target()\n if self.format == 'png':\n surface.write_to_png(self._output_file(frame))\n surface.finish()\n surface.flush()", - "docstring": "Called when CairoCanvas has rendered a bot" - }, - { - "code": "def empty_like(array, dtype=None):\n array = numpy.asarray(array)\n if dtype is None: \n dtype = array.dtype\n return anonymousmemmap(array.shape, dtype)", - "docstring": "Create a shared memory array from the shape of array." - }, - { - "code": "def cmp_to_key(mycmp):\n class K(object):\n __slots__ = ['obj']\n def __init__(self, obj, *args):\n self.obj = obj\n def __lt__(self, other):\n return mycmp(self.obj, other.obj) < 0\n def __gt__(self, other):\n return mycmp(self.obj, other.obj) > 0\n def __eq__(self, other):\n return mycmp(self.obj, other.obj) == 0\n def __le__(self, other):\n return mycmp(self.obj, other.obj) <= 0\n def __ge__(self, other):\n return mycmp(self.obj, other.obj) >= 0\n def __ne__(self, other):\n return mycmp(self.obj, other.obj) != 0\n def __hash__(self):\n raise TypeError('hash not implemented')\n return K", - "docstring": "Convert a cmp= function into a key= function" - }, - { - "code": "def _before(self):\n if request.path in self.excluded_routes:\n request._tracy_exclude = True\n return\n request._tracy_start_time = monotonic()\n client = request.headers.get(trace_header_client, None)\n require_client = current_app.config.get(\"TRACY_REQUIRE_CLIENT\", False)\n if client is None and require_client:\n abort(400, \"Missing %s header\" % trace_header_client)\n request._tracy_client = client\n request._tracy_id = request.headers.get(trace_header_id, new_id())", - "docstring": "Records the starting time of this request." - }, - { - "code": "def hash(self):\n hash_list = []\n for key, value in sorted(self.__dict__.items()):\n if not callable(value):\n if isinstance(value, np.ndarray):\n hash_list.append(value.tostring())\n else:\n hash_list.append(str(value))\n return hashlib.md5(repr(hash_list).encode()).hexdigest()", - "docstring": "Return a hash string computed on the PSF data."
- }, - { - "code": "def env():\n if cij.ssh.env():\n cij.err(\"cij.pci.env: invalid SSH environment\")\n return 1\n pci = cij.env_to_dict(PREFIX, REQUIRED)\n pci[\"BUS_PATH\"] = \"/sys/bus/pci\"\n pci[\"DEV_PATH\"] = os.sep.join([pci[\"BUS_PATH\"], \"devices\", pci[\"DEV_NAME\"]])\n cij.env_export(PREFIX, EXPORTED, pci)\n return 0", - "docstring": "Verify PCI variables and construct exported variables" - }, - { - "code": "def create_milestone_payment(session, project_id, bidder_id, amount,\n reason, description):\n milestone_data = {\n 'project_id': project_id,\n 'bidder_id': bidder_id,\n 'amount': amount,\n 'reason': reason,\n 'description': description\n }\n response = make_post_request(session, 'milestones',\n json_data=milestone_data)\n json_data = response.json()\n if response.status_code == 200:\n milestone_data = json_data['result']\n return Milestone(milestone_data)\n else:\n raise MilestoneNotCreatedException(message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'])", - "docstring": "Create a milestone payment" - }, - { - "code": "def quote(c):\n i = ord(c)\n return ESCAPE + HEX[i//16] + HEX[i%16]", - "docstring": "Quote a single character." - }, - { - "code": "def check_nonce(self, request, oauth_request):\n oauth_nonce = oauth_request['oauth_nonce']\n oauth_timestamp = oauth_request['oauth_timestamp']\n return check_nonce(request, oauth_request, oauth_nonce, oauth_timestamp)", - "docstring": "Checks nonce of request, and return True if valid." - }, - { - "code": "def CALLDATACOPY(self, mem_offset, data_offset, size):\n if issymbolic(size):\n if solver.can_be_true(self._constraints, size <= len(self.data) + 32):\n self.constraints.add(size <= len(self.data) + 32)\n raise ConcretizeArgument(3, policy='SAMPLED')\n if issymbolic(data_offset):\n if solver.can_be_true(self._constraints, data_offset == self._used_calldata_size):\n self.constraints.add(data_offset == self._used_calldata_size)\n raise ConcretizeArgument(2, policy='SAMPLED')\n self._use_calldata(data_offset, size)\n self._allocate(mem_offset, size)\n for i in range(size):\n try:\n c = Operators.ITEBV(8, data_offset + i < len(self.data), Operators.ORD(self.data[data_offset + i]), 0)\n except IndexError:\n c = 0\n self._store(mem_offset + i, c)", - "docstring": "Copy input data in current environment to memory" - }, - { - "code": "def connect(self):\n if JwtBuilder is None:\n raise NotConnectedToOpenEdX(\"This package must be installed in an OpenEdX environment.\")\n now = int(time())\n jwt = JwtBuilder.create_jwt_for_user(self.user)\n self.client = EdxRestApiClient(\n self.API_BASE_URL, append_slash=self.APPEND_SLASH, jwt=jwt,\n )\n self.expires_at = now + self.expires_in", - "docstring": "Connect to the REST API, authenticating with a JWT for the current user." - }, - { - "code": "def group_nodes_by_annotation(graph: BELGraph, annotation: str = 'Subgraph') -> Mapping[str, Set[BaseEntity]]:\n result = defaultdict(set)\n for u, v, d in graph.edges(data=True):\n if not edge_has_annotation(d, annotation):\n continue\n result[d[ANNOTATIONS][annotation]].add(u)\n result[d[ANNOTATIONS][annotation]].add(v)\n return dict(result)", - "docstring": "Group the nodes occurring in edges by the given annotation." 
- }, - { - "code": "def get(self, id, **kwargs):\n return super(DomainRecords, self).get(id, **kwargs)", - "docstring": "Retrieve a single domain record given the id" - }, - { - "code": "def _read_header(self, handle):\n header_data = unrarlib.RARHeaderDataEx()\n try:\n res = unrarlib.RARReadHeaderEx(handle, ctypes.byref(header_data))\n rarinfo = RarInfo(header=header_data)\n except unrarlib.ArchiveEnd:\n return None\n except unrarlib.MissingPassword:\n raise RuntimeError(\"Archive is encrypted, password required\")\n except unrarlib.BadPassword:\n raise RuntimeError(\"Bad password for Archive\")\n except unrarlib.UnrarException as e:\n raise BadRarFile(str(e))\n return rarinfo", - "docstring": "Read current member header into a RarInfo object." - }, - { - "code": "def annotate_metadata_platform(repo):\n print(\"Added platform information\")\n package = repo.package\n mgr = plugins_get_mgr()\n repomgr = mgr.get(what='instrumentation', name='platform')\n package['platform'] = repomgr.get_metadata()", - "docstring": "Update metadata with host information" - }, - { - "code": "def appendWif(self, wif):\n if wif:\n try:\n self.privatekey_class(wif)\n self.wifs.add(wif)\n except Exception:\n raise InvalidWifError", - "docstring": "Add a wif that should be used for signing of the transaction." - }, - { - "code": "def baseurl(url):\n parsed_url = urlparse.urlparse(url)\n if not parsed_url.netloc or parsed_url.scheme not in (\"http\", \"https\"):\n raise ValueError('bad url')\n service_url = \"%s://%s%s\" % (parsed_url.scheme, parsed_url.netloc, parsed_url.path.strip())\n return service_url", - "docstring": "return baseurl of given url" - }, - { - "code": "def copy(self):\n return self.__class__(\n amount=self[\"amount\"],\n asset=self[\"asset\"].copy(),\n blockchain_instance=self.blockchain,\n )", - "docstring": "Copy the instance and make sure not to use a reference" - }, - { - "code": "def plot(self):\n if self.results_table == None:\n return \"no results found\"\n else:\n bb = self.results_table.sort_values(\n by=[\"ABCD\", \"ACBD\"], \n ascending=[False, True],\n )\n import toyplot\n c = toyplot.Canvas(width=600, height=200)\n a = c.cartesian()\n m = a.bars(bb)\n return c, a, m", - "docstring": "return a toyplot barplot of the results table." - }, - { - "code": "def reserve_ipblock(self, ipblock):\n properties = {\n \"name\": ipblock.name\n }\n if ipblock.location:\n properties['location'] = ipblock.location\n if ipblock.size:\n properties['size'] = str(ipblock.size)\n raw = {\n \"properties\": properties,\n }\n response = self._perform_request(\n url='/ipblocks', method='POST', data=json.dumps(raw))\n return response", - "docstring": "Reserves an IP block within your account."
- }, - { - "code": "def marv(ctx, config, loglevel, logfilter, verbosity):\n if config is None:\n cwd = os.path.abspath(os.path.curdir)\n while cwd != os.path.sep:\n config = os.path.join(cwd, 'marv.conf')\n if os.path.exists(config):\n break\n cwd = os.path.dirname(cwd)\n else:\n config = '/etc/marv/marv.conf'\n if not os.path.exists(config):\n config = None\n ctx.obj = config\n setup_logging(loglevel, verbosity, logfilter)", - "docstring": "Manage a Marv site" - }, - { - "code": "def container_id(self, name):\n container = self._containers.get(name, None)\n if not container is None:\n return container.get('id', None)\n return None", - "docstring": "Try to find the container ID with the specified name" - }, - { - "code": "def populate(publications):\n\tcustomlinks = CustomLink.objects.filter(publication__in=publications)\n\tcustomfiles = CustomFile.objects.filter(publication__in=publications)\n\tpublications_ = {}\n\tfor publication in publications:\n\t\tpublication.links = []\n\t\tpublication.files = []\n\t\tpublications_[publication.id] = publication\n\tfor link in customlinks:\n\t\tpublications_[link.publication_id].links.append(link)\n\tfor file in customfiles:\n\t\tpublications_[file.publication_id].files.append(file)", - "docstring": "Load custom links and files from database and attach to publications." - }, - { - "code": "def get(self):\n if self.is_shutdown():\n return None\n while len(self._states) == 0:\n if self.running == 0:\n return None\n if self.is_shutdown():\n return None\n logger.debug(\"Waiting for available states\")\n self._lock.wait()\n state_id = self._policy.choice(list(self._states))\n if state_id is None:\n return None\n del self._states[self._states.index(state_id)]\n return state_id", - "docstring": "Dequeue a state with the max priority" - }, - { - "code": "def _transmit_delete(self, channel_metadata_item_map):\n for chunk in chunks(channel_metadata_item_map, self.enterprise_configuration.transmission_chunk_size):\n serialized_chunk = self._serialize_items(list(chunk.values()))\n try:\n self.client.delete_content_metadata(serialized_chunk)\n except ClientError as exc:\n LOGGER.error(\n 'Failed to delete [%s] content metadata items for integrated channel [%s] [%s]',\n len(chunk),\n self.enterprise_configuration.enterprise_customer.name,\n self.enterprise_configuration.channel_code,\n )\n LOGGER.error(exc)\n else:\n self._delete_transmissions(chunk.keys())", - "docstring": "Transmit content metadata deletion to integrated channel." 
- }, - { - "code": "def nfilter1(data, reps):\n if sum(reps) >= data.paramsdict[\"mindepth_majrule\"] and \\\n sum(reps) <= data.paramsdict[\"maxdepth\"]:\n return 1\n else:\n return 0", - "docstring": "applies read depths filter" - }, - { - "code": "def _decrypt_masterpassword(self):\n aes = AESCipher(self.password)\n checksum, encrypted_master = self.config[self.config_key].split(\"$\")\n try:\n decrypted_master = aes.decrypt(encrypted_master)\n except Exception:\n self._raise_wrongmasterpassexception()\n if checksum != self._derive_checksum(decrypted_master):\n self._raise_wrongmasterpassexception()\n self.decrypted_master = decrypted_master", - "docstring": "Decrypt the encrypted masterkey" - }, - { - "code": "def parse(self, filelike, filename):\n self.log = log\n self.source = filelike.readlines()\n src = \"\".join(self.source)\n compile(src, filename, \"exec\")\n self.stream = TokenStream(StringIO(src))\n self.filename = filename\n self.all = None\n self.future_imports = set()\n self._accumulated_decorators = []\n return self.parse_module()", - "docstring": "Parse the given file-like object and return its Module object." - }, - { - "code": "def _characteristic_changed(self, characteristic):\n on_changed = self._char_on_changed.get(characteristic, None)\n if on_changed is not None:\n on_changed(characteristic.value().bytes().tobytes())\n char = characteristic_list().get(characteristic)\n if char is not None:\n char._value_read.set()", - "docstring": "Called when the specified characteristic has changed its value." - }, - { - "code": "def build_delete_node_by_hash(manager: Manager) -> Callable[[BELGraph, str], None]:\n @in_place_transformation\n def delete_node_by_hash(graph: BELGraph, node_hash: str) -> None:\n node = manager.get_dsl_by_hash(node_hash)\n graph.remove_node(node)\n return delete_node_by_hash", - "docstring": "Make a delete function that's bound to the manager." - }, - { - "code": "def __get_uuids_from_profile_name(self, profile_name):\n uuids = []\n with self.db.connect() as session:\n query = session.query(Profile).\\\n filter(Profile.name == profile_name)\n profiles = query.all()\n if profiles:\n for p in profiles:\n uuids.append(p.uuid)\n return uuids", - "docstring": "Get the uuids for a profile name" - }, - { - "code": "def heappushpop_max(heap, item):\n if heap and heap[0] > item:\n item, heap[0] = heap[0], item\n _siftup_max(heap, 0)\n return item", - "docstring": "Fast version of a heappush followed by a heappop." - }, - { - "code": "def _delete_transmissions(self, content_metadata_item_ids):\n ContentMetadataItemTransmission = apps.get_model(\n 'integrated_channel',\n 'ContentMetadataItemTransmission'\n )\n ContentMetadataItemTransmission.objects.filter(\n enterprise_customer=self.enterprise_configuration.enterprise_customer,\n integrated_channel_code=self.enterprise_configuration.channel_code(),\n content_id__in=content_metadata_item_ids\n ).delete()", - "docstring": "Delete ContentMetadataItemTransmission models associated with the given content metadata items."
- }, - { - "code": "def report(function, *args, **kwds):\n try:\n function(*args, **kwds)\n except Exception:\n traceback.print_exc()", - "docstring": "Run a function, catch, report and discard exceptions" - }, - { - "code": "def CheckHeaderFileIncluded(filename, include_state, error):\n fileinfo = FileInfo(filename)\n if Search(_TEST_FILE_SUFFIX, fileinfo.BaseName()):\n return\n for ext in GetHeaderExtensions():\n basefilename = filename[0:len(filename) - len(fileinfo.Extension())]\n headerfile = basefilename + '.' + ext\n if not os.path.exists(headerfile):\n continue\n headername = FileInfo(headerfile).RepositoryName()\n first_include = None\n for section_list in include_state.include_list:\n for f in section_list:\n if headername in f[0] or f[0] in headername:\n return\n if not first_include:\n first_include = f[1]\n error(filename, first_include, 'build/include', 5,\n '%s should include its header file %s' % (fileinfo.RepositoryName(),\n headername))", - "docstring": "Logs an error if a source file does not include its header." - }, - { - "code": "def create_hireme_project(session, title, description,\n currency, budget, jobs, hireme_initial_bid):\n jobs.append(create_job_object(id=417))\n project_data = {'title': title,\n 'description': description,\n 'currency': currency,\n 'budget': budget,\n 'jobs': jobs,\n 'hireme': True,\n 'hireme_initial_bid': hireme_initial_bid\n }\n response = make_post_request(session, 'projects', json_data=project_data)\n json_data = response.json()\n if response.status_code == 200:\n project_data = json_data['result']\n p = Project(project_data)\n p.url = urljoin(session.url, 'projects/%s' % p.seo_url)\n return p\n else:\n raise ProjectNotCreatedException(message=json_data['message'],\n error_code=json_data['error_code'],\n request_id=json_data['request_id'],\n )", - "docstring": "Create a hireme project" - }, - { - "code": "def update_segment(self, selector, base, size, perms):\n logger.info(\"Updating selector %s to 0x%02x (%s bytes) (%s)\", selector, base, size, perms)\n if selector == 99:\n self.set_fs(base)\n else:\n logger.error(\"No way to write segment: %d\", selector)", - "docstring": "Only useful for setting FS right now." - }, - { - "code": "def gen_to_dev(self, address):\n cmd = [\"nvm_addr gen2dev\", self.envs[\"DEV_PATH\"], \"0x{:x}\".format(address)]\n status, stdout, _ = cij.ssh.command(cmd, shell=True)\n if status:\n raise RuntimeError(\"cij.liblight.gen_to_dev: cmd fail\")\n return int(re.findall(r\"dev: ([0-9a-fx]+)\", stdout)[0], 16)", - "docstring": "Generic address to device address" - }, - { - "code": "def launchAppByBundleId(bundleID):\n ws = AppKit.NSWorkspace.sharedWorkspace()\n r = ws.launchAppWithBundleIdentifier_options_additionalEventParamDescriptor_launchIdentifier_(\n bundleID,\n AppKit.NSWorkspaceLaunchAllowingClassicStartup,\n AppKit.NSAppleEventDescriptor.nullDescriptor(),\n None)\n if not r[0]:\n raise RuntimeError('Error launching specified application.')", - "docstring": "Launch the application with the specified bundle ID" - }, - { - "code": "def human(self, size, base=1000, units=' kMGTZ'):\n sign = '+' if size >= 0 else '-'\n size = abs(size)\n if size < 1000:\n return '%s%d' % (sign, size)\n for i, suffix in enumerate(units):\n unit = 1000 ** (i + 1)\n if size < unit:\n return ('%s%.01f%s' % (\n sign,\n size / float(unit) * base,\n suffix,\n )).strip()\n raise OverflowError", - "docstring": "Convert the input ``size`` to human readable, short form."
- }, - { - "code": "def unlink_f(path):\n try:\n os.unlink(path)\n except OSError as err:\n if err.errno != errno.ENOENT:\n raise", - "docstring": "Unlink path but do not complain if file does not exist." - }, - { - "code": "def register(self, provider_class):\n if not issubclass(provider_class, BaseProvider):\n raise TypeError('%s is not a subclass of BaseProvider' % provider_class.__name__)\n if provider_class in self._registered_providers:\n raise AlreadyRegistered('%s is already registered' % provider_class.__name__)\n if issubclass(provider_class, DjangoProvider):\n signals.post_save.connect(\n self.invalidate_stored_oembeds,\n sender=provider_class._meta.model\n )\n self._registered_providers.append(provider_class)\n self.invalidate_providers()", - "docstring": "Registers a provider with the site." - }, - { - "code": "def _c_func(func, restype, argtypes, errcheck=None):\n func.restype = restype\n func.argtypes = argtypes\n if errcheck is not None:\n func.errcheck = errcheck\n return func", - "docstring": "Wrap c function setting prototype." - }, - { - "code": "async def set_presence(self, set_presence_request):\n response = hangouts_pb2.SetPresenceResponse()\n await self._pb_request('presence/setpresence',\n set_presence_request, response)\n return response", - "docstring": "Set the presence status." - }, - { - "code": "def close(self):\n self.process.stdout.close()\n self.process.stderr.close()\n self.running = False", - "docstring": "Close outputs of process." - }, - { - "code": "def run(cmd, **kw):\n kw = kw.copy()\n kw.setdefault('warn', False)\n report_error = kw.pop('report_error', True)\n runner = kw.pop('runner', invoke_run)\n try:\n return runner(cmd, **kw)\n except exceptions.Failure as exc:\n sys.stdout.flush()\n sys.stderr.flush()\n if report_error:\n notify.error(\"Command `{}` failed with RC={}!\".format(cmd, exc.result.return_code,))\n raise\n finally:\n sys.stdout.flush()\n sys.stderr.flush()", - "docstring": "Run a command and flush its output." - }, - { - "code": "def fillHSV(self, hsv, start=0, end=-1):\n self.fill(conversions.hsv2rgb(hsv), start, end)", - "docstring": "Fill the entire strip with HSV color tuple" - }, - { - "code": "def walk(self, maxresults=100, maxdepth=None):\n log.debug(\"step\")\n self.seen = {}\n self.ignore(self, self.__dict__, self.obj, self.seen, self._ignore)\n self.ignore_caller()\n self.maxdepth = maxdepth\n count = 0\n log.debug(\"will iterate results\")\n for result in self._gen(self.obj):\n log.debug(\"will yield\")\n yield result\n count += 1\n if maxresults and count >= maxresults:\n yield 0, 0, \"==== Max results reached ====\"\n return", - "docstring": "Walk the object tree, ignoring duplicates and circular refs." - }, - { - "code": "def with_access_to(self, request, *args, **kwargs):\n self.queryset = self.queryset.order_by('name')\n enterprise_id = self.request.query_params.get('enterprise_id', None)\n enterprise_slug = self.request.query_params.get('enterprise_slug', None)\n enterprise_name = self.request.query_params.get('search', None)\n if enterprise_id is not None:\n self.queryset = self.queryset.filter(uuid=enterprise_id)\n elif enterprise_slug is not None:\n self.queryset = self.queryset.filter(slug=enterprise_slug)\n elif enterprise_name is not None:\n self.queryset = self.queryset.filter(name__icontains=enterprise_name)\n return self.list(request, *args, **kwargs)", - "docstring": "Returns the list of enterprise customers the user has a specified group permission access to."
- }, - { - "code": "def create_supervisor_services(self, site):\n self.vprint('create_supervisor_services:', site)\n self.set_site_specifics(site=site)\n r = self.local_renderer\n if self.verbose:\n print('r.env:')\n pprint(r.env, indent=4)\n self.vprint('r.env.has_worker:', r.env.has_worker)\n if not r.env.has_worker:\n self.vprint('skipping: no celery worker')\n return\n if self.name.lower() not in self.genv.services:\n self.vprint('skipping: celery not enabled')\n return\n hostname = self.current_hostname\n target_sites = self.genv.available_sites_by_host.get(hostname, None)\n if target_sites and site not in target_sites:\n self.vprint('skipping: site not supported on this server')\n return\n self.render_paths()\n conf_name = 'celery_%s.conf' % site\n ret = r.render_to_string('celery/celery_supervisor.template.conf')\n return conf_name, ret", - "docstring": "This is called for each site to render a Celery config file." - }, - { - "code": "def _build_backend():\n backend_path = os.environ.get('PEP517_BACKEND_PATH')\n if backend_path:\n extra_pathitems = backend_path.split(os.pathsep)\n sys.path[:0] = extra_pathitems\n ep = os.environ['PEP517_BUILD_BACKEND']\n mod_path, _, obj_path = ep.partition(':')\n try:\n obj = import_module(mod_path)\n except ImportError:\n raise BackendUnavailable(traceback.format_exc())\n if backend_path:\n if not any(\n contained_in(obj.__file__, path)\n for path in extra_pathitems\n ):\n raise BackendInvalid(\"Backend was not loaded from backend-path\")\n if obj_path:\n for path_part in obj_path.split('.'):\n obj = getattr(obj, path_part)\n return obj", - "docstring": "Find and load the build backend" - }, - { - "code": "def lib2to3_unparse(node, *, hg=False):\n code = str(node)\n if hg:\n from retype_hgext import apply_job_security\n code = apply_job_security(code)\n return code", - "docstring": "Given a lib2to3 node, return its string representation." 
- }, - { - "code": "def sendall(self, line):\n if isinstance(line, APRSPacket):\n line = str(line)\n elif not isinstance(line, string_type):\n raise TypeError(\"Expected line to be str or APRSPacket, got %s\" % type(line))\n if not self._connected:\n raise ConnectionError(\"not connected\")\n if line == \"\":\n return\n line = line.rstrip(\"\\r\\n\") + \"\\r\\n\"\n try:\n self.sock.setblocking(1)\n self.sock.settimeout(5)\n self._sendall(line)\n except socket.error as exp:\n self.close()\n raise ConnectionError(str(exp))", - "docstring": "Send a line, or multiple lines separated by '\\\\r\\\\n'" - }, - { - "code": "def annotate_metadata_code(repo, files):\n package = repo.package\n package['code'] = []\n for p in files:\n matching_files = glob2.glob(\"**/{}\".format(p))\n for f in matching_files:\n absf = os.path.abspath(f)\n print(\"Add commit data for {}\".format(f))\n package['code'].append(OrderedDict([\n ('script', f),\n ('permalink', repo.manager.permalink(repo, absf)),\n ('mimetypes', mimetypes.guess_type(absf)[0]),\n ('sha256', compute_sha256(absf))\n ]))", - "docstring": "Update metadata with the commit information" - }, - { - "code": "def _cell(x):\n x_no_none = [i if i is not None else \"\" for i in x]\n return array(x_no_none, dtype=np_object)", - "docstring": "translate an array x into a MATLAB cell array" - }, - { - "code": "def warn(message, category=None, stacklevel=1):\n if isinstance(message, Warning):\n category = message.__class__\n if category is None:\n category = UserWarning\n assert issubclass(category, Warning)\n try:\n caller = sys._getframe(stacklevel)\n except ValueError:\n globals = sys.__dict__\n lineno = 1\n else:\n globals = caller.f_globals\n lineno = caller.f_lineno\n if '__name__' in globals:\n module = globals['__name__']\n else:\n module = \"\"\n filename = globals.get('__file__')\n if filename:\n fnl = filename.lower()\n if fnl.endswith((\".pyc\", \".pyo\")):\n filename = filename[:-1]\n else:\n if module == \"__main__\":\n try:\n filename = sys.argv[0]\n except AttributeError:\n filename = '__main__'\n if not filename:\n filename = module\n registry = globals.setdefault(\"__warningregistry__\", {})\n warn_explicit(message, category, filename, lineno, module, registry,\n globals)", - "docstring": "Issue a warning, or maybe ignore it or raise an exception." - }, - { - "code": "def from_json(cls, api_client, data):\n self = cls(api_client)\n PandoraModel.populate_fields(api_client, self, data)\n return self", - "docstring": "Convert one JSON value to a model object" - }, - { - "code": "def get(self, key_name, decrypt=True):\n self._assert_valid_stash()\n key = self._storage.get(key_name).copy()\n if not key.get('value'):\n return None\n if decrypt:\n key['value'] = self._decrypt(key['value'])\n audit(\n storage=self._storage.db_path,\n action='GET',\n message=json.dumps(dict(key_name=key_name)))\n return key", - "docstring": "Return a key with its parameters if it was found." - }, - { - "code": "def set(self, ring, angle, color):\n pixel = self.angleToPixel(angle, ring)\n self._set_base(pixel, color)", - "docstring": "Set pixel to RGB color tuple" - }, - { - "code": "def _i2c_write_bytes(self, data):\n for byte in data:\n self._command.append(str(bytearray((0x11, 0x00, 0x00, byte))))\n self._ft232h.output_pins({0: GPIO.LOW, 1: GPIO.HIGH}, write=False)\n self._command.append(self._ft232h.mpsse_gpio() * _REPEAT_DELAY)\n self._command.append('\\x22\\x00')\n self._expected += len(data)", - "docstring": "Write the specified number of bytes to the chip."
- }, - { - "code": "def provider_for_url(self, url):\n for provider, regex in self.get_registry().items():\n if re.match(regex, url) is not None:\n return provider\n raise OEmbedMissingEndpoint('No endpoint matches URL: %s' % url)", - "docstring": "Find the right provider for a URL" - }, - { - "code": "def update_terminal_colors(self):\n self.color_scheme = self.create_color_scheme(\n background=self.syntax_highlighter.color_scheme.background,\n foreground=self.syntax_highlighter.color_scheme.formats['normal'].foreground().color())", - "docstring": "Update terminal color scheme based on the pygments color scheme colors" - }, - { - "code": "def sigma_prime(self):\n return _np.sqrt(self.emit/self.beta(self.E))", - "docstring": "Divergence of matched beam" - }, - { - "code": "def to_boulderio(infile, outfile):\n seq_reader = sequences.file_reader(infile)\n f_out = utils.open_file_write(outfile)\n for sequence in seq_reader:\n print(\"SEQUENCE_ID=\" + sequence.id, file=f_out)\n print(\"SEQUENCE_TEMPLATE=\" + sequence.seq, file=f_out)\n print(\"=\", file=f_out)\n utils.close(f_out)", - "docstring": "Converts input sequence file into a \"Boulder-IO format\", as used by primer3" - }, - { - "code": "def draw_cornu_flat(x0, y0, t0, t1, s0, c0, flip, cs, ss, cmd):\n for j in range(0, 100):\n t = j * .01\n s, c = eval_cornu(t0 + t * (t1 - t0))\n s *= flip\n s -= s0\n c -= c0\n x = c * cs - s * ss\n y = s * cs + c * ss\n print_pt(x0 + x, y0 + y, cmd)\n cmd = 'lineto'\n return cmd", - "docstring": "Raph Levien's code draws fast LINETO segments." - }, - { - "code": "def print_parents(self):\n if self.gender == female:\n title = 'Daughter'\n elif self.gender == male:\n title = 'Son'\n else:\n title = 'Child'\n p1 = self.parents[0]\n p2 = self.parents[1]\n template = '%s of %s, the %s, and %s, the %s.'\n print(template % (title, p1.name, p1.epithet, p2.name, p2.epithet))", - "docstring": "Print parents' names and epithets." - }, - { - "code": "def clear(self):\n self.ops = []\n self.wifs = set()\n self.signing_accounts = []\n self[\"expiration\"] = None\n dict.__init__(self, {})", - "docstring": "Clear the transaction builder and start from scratch" - }, - { - "code": "def resize(self, width, height):\n if not self.fbo:\n return\n self.width = width // self.widget.devicePixelRatio()\n self.height = height // self.widget.devicePixelRatio()\n self.buffer_width = width\n self.buffer_height = height\n super().resize(width, height)", - "docstring": "Pyqt specific resize callback." - }, - { - "code": "def grid_stack_from_deflection_stack(grid_stack, deflection_stack):\n if deflection_stack is not None:\n def minus(grid, deflections):\n return grid - deflections\n return grid_stack.map_function(minus, deflection_stack)", - "docstring": "For a deflection stack, compute a new grid stack by subtracting the deflections" - }, - { - "code": "def free(self):\n if not self.borrowed:\n self.xmlnode.unlinkNode()\n self.xmlnode.freeNode()\n self.xmlnode=None", - "docstring": "Unlink and free the XML node owned by `self`."
- }, - { - "code": "def _step6func(self, \n samples, \n noreverse, \n force, \n randomseed, \n ipyclient, \n **kwargs):\n samples = _get_samples(self, samples)\n csamples = self._samples_precheck(samples, 6, force)\n if self._headers:\n print(\"\\n Step 6: Clustering at {} similarity across {} samples\".\\\n format(self.paramsdict[\"clust_threshold\"], len(csamples)))\n if not csamples:\n raise IPyradError(FIRST_RUN_5)\n elif not force:\n if all([i.stats.state >= 6 for i in csamples]):\n print(DATABASE_EXISTS.format(len(samples)))\n return\n assemble.cluster_across.run(\n self, \n csamples, \n noreverse,\n force, \n randomseed, \n ipyclient, \n **kwargs)", - "docstring": "Hidden function to start Step 6." - }, - { - "code": "def register_templates():\n event_templates = [current_stats._events_config[e]\n ['templates']\n for e in\n current_stats._events_config]\n aggregation_templates = [current_stats._aggregations_config[a]\n ['templates']\n for a in\n current_stats._aggregations_config]\n return event_templates + aggregation_templates", - "docstring": "Register elasticsearch templates for events." - }, - { - "code": "def root_path():\n module_dir = os.path.dirname(globals()['__file__'])\n return os.path.dirname(os.path.dirname(module_dir))", - "docstring": "Get the absolute path to the root of the demosys package" - }, - { - "code": "def _parse_field_value(line):\n if line.startswith(':'):\n return None, None\n if ':' not in line:\n return line, ''\n field, value = line.split(':', 1)\n value = value[1:] if value.startswith(' ') else value\n return field, value", - "docstring": "Parse the field and value from a line." - }, - { - "code": "def score(self, word, docid):\n \"Compute a score for this word on this docid.\"\n return (math.log(1 + self.index[word][docid])\n / math.log(1 + self.documents[docid].nwords))", - "docstring": "Compute a score for this word on this docid." - }, - { - "code": "def mods_genre(self):\n\t\ttype2genre = {\n\t\t\t\t'conference': 'conference publication',\n\t\t\t\t'book chapter': 'bibliography',\n\t\t\t\t'unpublished': 'article'\n\t\t\t}\n\t\ttp = str(self.type).lower()\n\t\treturn type2genre.get(tp, tp)", - "docstring": "Guesses an appropriate MODS XML genre type." - }, - { - "code": "def _disassoc(self, url_fragment, me, other):\n url = self.endpoint + '%d/%s/' % (me, url_fragment)\n r = client.get(url, params={'id': other}).json()\n if r['count'] == 0:\n return {'changed': False}\n r = client.post(url, data={'disassociate': True, 'id': other})\n return {'changed': True}", - "docstring": "Disassociate the `other` record from the `me` record." - }, - { - "code": "def close(self):\n self._con.commit()\n self._cur.close()\n self._con.close()", - "docstring": "Commits any pending transactions and closes the database." - }, - { - "code": "def reset_creation_info(self):\n self.created_date_set = False\n self.creation_comment_set = False\n self.lics_list_ver_set = False", - "docstring": "Resets builder state to allow building new creation info." - }, - { - "code": "def validate(self):\n for env in list(self):\n if not env.exists:\n self.remove(env)", - "docstring": "Validate all the entries in the environment cache." 
- }, - { - "code": "def figureStimulus(abf,sweeps=[0]):\n stimuli=[2.31250, 2.35270]\n for sweep in sweeps:\n abf.setsweep(sweep)\n for stimulus in stimuli:\n S1=int(abf.pointsPerSec*stimulus)\n S2=int(abf.pointsPerSec*(stimulus+0.001))\n abf.sweepY[S1:S2]=np.nan\n I1=int(abf.pointsPerSec*2.2)\n I2=int(abf.pointsPerSec*2.6)\n baseline=np.average(abf.sweepY[int(abf.pointsPerSec*2.0):int(abf.pointsPerSec*2.2)])\n Ys=lowPassFilter(abf.sweepY[I1:I2])-baseline\n Xs=abf.sweepX2[I1:I1+len(Ys)].flatten()\n plt.plot(Xs,Ys,alpha=.5,lw=2)\n return", - "docstring": "Create a plot of one area of interest of a single sweep." - }, - { - "code": "def suggest_spelling(q, wait=10, asynchronous=False, cached=False):\n return YahooSpelling(q, wait, asynchronous, cached)", - "docstring": "Returns list of suggested spelling corrections for the given query." - }, - { - "code": "async def create_conversation(self, create_conversation_request):\n response = hangouts_pb2.CreateConversationResponse()\n await self._pb_request('conversations/createconversation',\n create_conversation_request, response)\n return response", - "docstring": "Create a new conversation." - }, - { - "code": "def check_glfw_version(self):\n print(\"glfw version: {} (python wrapper version {})\".format(glfw.get_version(), glfw.__version__))\n if glfw.get_version() < self.min_glfw_version:\n raise ValueError(\"Please update glfw binaries to version {} or later\".format(self.min_glfw_version))", - "docstring": "Ensure glfw library version is compatible" - }, - { - "code": "def version(self):\n ver = Version()\n ver.conn = self.conn\n ver.attrs = {\n 'service_id': self.attrs['id'],\n }\n ver.save()\n return ver", - "docstring": "Create a new version under this service." - }, - { - "code": "def runnable_effects(self) -> List[Type[Effect]]:\n return [cls for cls in self.effect_classes if cls.runnable]", - "docstring": "Returns the runnable effects in the package" - }, - { - "code": "def kill_all(self):\n for pid in self.children:\n try:\n os.kill(pid, signal.SIGTRAP)\n except OSError:\n continue\n self.join()", - "docstring": "kill all slaves and reap the monitor" - }, - { - "code": "def _get_notify_msg_and_payload(result, stream):\n token = stream.advance_past_chars([\"=\", \"*\"])\n token = int(token) if token != \"\" else None\n logger.debug(\"%s\", fmt_green(\"parsing message\"))\n message = stream.advance_past_chars([\",\"])\n logger.debug(\"parsed message\")\n logger.debug(\"%s\", fmt_green(message))\n payload = _parse_dict(stream)\n return token, message.strip(), payload", - "docstring": "Get notify message and payload dict" - }, - { - "code": "def gen_methods(self, *args, **kwargs):\n token = args[0]\n inst = self.inst\n prefix = self._method_prefix\n for method_key in self.gen_method_keys(*args, **kwargs):\n method = getattr(inst, prefix + method_key, None)\n if method is not None:\n yield method\n typename = type(token).__name__\n yield from self.check_basetype(\n token, typename, self.builtins.get(typename))\n for basetype_name in self.interp_types:\n yield from self.check_basetype(\n token, basetype_name, getattr(self.types, basetype_name, None))\n for basetype_name in self.abc_types:\n yield from self.check_basetype(\n token, basetype_name, getattr(self.collections, basetype_name, None))\n yield from self.gen_generic()", - "docstring": "Find all method names this input dispatches to."
- }, - { - "code": "def message(self):\n try:\n with open(join(self.fs_path, u'message')) as message_file:\n return u''.join([line.decode('utf-8') for line in message_file.readlines()])\n except IOError:\n return u''", - "docstring": "returns the user submitted text" - } -] \ No newline at end of file