[
{
"code": "def _publish_instruction_as_executed(self, insn):\n    self._icount += 1\n    self._publish('did_execute_instruction', self._last_pc, self.PC, insn)"
},
{
"code": "def reset_lock(self):\n    redis_key = self.CELERY_LOCK.format(task_id=self.task_identifier)\n    self.celery_self.backend.client.delete(redis_key)"
},
{
| "code": "def check_max_filesize(chosen_file, max_size):\n if os.path.getsize(chosen_file) > max_size:\n return False\n else:\n return True" | |
},
{
"code": "def _generate_index(self):\n    self._dict = {v.id: k for k, v in enumerate(self)}"
},
{
"code": "def prepare_attrib_mapping(self, primitive):\n    buffer_info = []\n    for name, accessor in primitive.attributes.items():\n        info = VBOInfo(*accessor.info())\n        info.attributes.append((name, info.components))\n        if buffer_info and buffer_info[-1].buffer_view == info.buffer_view:\n            if buffer_info[-1].interleaves(info):\n                buffer_info[-1].merge(info)\n                continue\n        buffer_info.append(info)\n    return buffer_info"
},
{
"code": "def create_new_locale(\n    self,\n    template_id,\n    locale,\n    version_name,\n    subject,\n    text='',\n    html='',\n    timeout=None\n):\n    payload = {\n        'locale': locale,\n        'name': version_name,\n        'subject': subject\n    }\n    if html:\n        payload['html'] = html\n    if text:\n        payload['text'] = text\n    return self._api_request(\n        self.TEMPLATES_LOCALES_ENDPOINT % template_id,\n        self.HTTP_POST,\n        payload=payload,\n        timeout=timeout\n    )"
},
{
"code": "def getZeroedOutEncoding(self, n):\n    assert all(field.numRecords > n for field in self.fields)\n    encoding = np.concatenate([field.encoder.encode(SENTINEL_VALUE_FOR_MISSING_DATA)\n                               if field.isPredictedField else field.encodings[n]\n                               for field in self.fields])\n    return encoding"
},
{
"code": "def angle(x0, y0, x1, y1):\n    return degrees(atan2(y1-y0, x1-x0))"
},
{
| "code": "def read_value(self):\n pass\n self._value_read.clear()\n self._device._peripheral.readValueForDescriptor(self._descriptor)\n if not self._value_read.wait(timeout_sec):\n raise RuntimeError('Exceeded timeout waiting to read characteristic value!')\n return self._value" | |
},
{
"code": "def find_chunk(phrase, np):\n    for i in range(len(phrase)):\n        parsed_np = find_chunk_sub(phrase, np, i)\n        if parsed_np:\n            return parsed_np"
},
{
"code": "def fluent(func):\n    @wraps(func)\n    def fluent_interface(instance, *args, **kwargs):\n        ret = func(instance, *args, **kwargs)\n        if ret is not None:\n            return ret\n        return instance\n    return fluent_interface"
},
{
"code": "def _generalized_word_starts(self, xs):\n    self.word_starts = []\n    i = 0\n    for n in range(len(xs)):\n        self.word_starts.append(i)\n        i += len(xs[n]) + 1"
},
{
"code": "def modify_input():\n    doc_mapper = DocMapper()\n    if doc_mapper.is_pipe:\n        objects = [obj for obj in doc_mapper.get_pipe()]\n        modified = modify_data(objects)\n        for line in modified:\n            obj = doc_mapper.line_to_object(line)\n            obj.save()\n        print_success(\"Object(s) successfully changed\")\n    else:\n        print_error(\"Please use this tool with pipes\")"
},
{
"code": "def course_enrollments(self, request, pk):\n    enterprise_customer = self.get_object()\n    serializer = serializers.EnterpriseCustomerCourseEnrollmentsSerializer(\n        data=request.data,\n        many=True,\n        context={\n            'enterprise_customer': enterprise_customer,\n            'request_user': request.user,\n        }\n    )\n    if serializer.is_valid():\n        serializer.save()\n        return Response(serializer.data, status=HTTP_200_OK)\n    return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)"
},
{
"code": "def mods_genre(self):\n    type2genre = {\n        'conference': 'conference publication',\n        'book chapter': 'bibliography',\n        'unpublished': 'article'\n    }\n    tp = str(self.type).lower()\n    return type2genre.get(tp, tp)"
},
{
"code": "def _remove_default_tz_bindings(self, context, network_id):\n    default_tz = CONF.NVP.default_tz\n    if not default_tz:\n        LOG.warn(\"additional_default_tz_types specified, \"\n                 \"but no default_tz. Skipping \"\n                 \"_remove_default_tz_bindings().\")\n        return\n    if not network_id:\n        LOG.warn(\"neutron network_id not specified, skipping \"\n                 \"_remove_default_tz_bindings()\")\n        return\n    for net_type in CONF.NVP.additional_default_tz_types:\n        if net_type in TZ_BINDINGS:\n            binding = TZ_BINDINGS[net_type]\n            binding.remove(context, default_tz, network_id)\n        else:\n            LOG.warn(\"Unknown default tz type %s\" % (net_type))"
},
{
"code": "def _convert(value, to_type, default=None):\n    try:\n        return default if value is None else to_type(value)\n    except ValueError:\n        return default"
},
{
| "code": "def paths_from_env(prefix=None, names=None):\n def expand_path(path):\n return os.path.abspath(os.path.expanduser(os.path.expandvars(path)))\n if prefix is None:\n prefix = \"CIJ\"\n if names is None:\n names = [\n \"ROOT\", \"ENVS\", \"TESTPLANS\", \"TESTCASES\", \"TESTSUITES\", \"MODULES\",\n \"HOOKS\", \"TEMPLATES\"\n ]\n conf = {v: os.environ.get(\"_\".join([prefix, v])) for v in names}\n for env in (e for e in conf.keys() if e[:len(prefix)] in names and conf[e]):\n conf[env] = expand_path(conf[env])\n if not os.path.exists(conf[env]):\n err(\"%s_%s: %r, does not exist\" % (prefix, env, conf[env]))\n return conf" | |
},
{
"code": "def _configure_users(self, site=None, full=0, only_data=0):\n    site = site or ALL\n    full = int(full)\n    if full and not only_data:\n        packager = self.get_satchel('packager')\n        packager.install_required(type=SYSTEM, service=self.name)\n    r = self.local_renderer\n    params = self.get_user_vhosts(site=site)\n    with settings(warn_only=True):\n        self.add_admin_user()\n    params = sorted(list(params))\n    if not only_data:\n        for user, password, vhost in params:\n            r.env.broker_user = user\n            r.env.broker_password = password\n            r.env.broker_vhost = vhost\n            with settings(warn_only=True):\n                r.sudo('rabbitmqctl add_user {broker_user} {broker_password}')\n                r.sudo('rabbitmqctl add_vhost {broker_vhost}')\n                r.sudo('rabbitmqctl set_permissions -p {broker_vhost} {broker_user} \".*\" \".*\" \".*\"')\n                r.sudo('rabbitmqctl set_permissions -p {broker_vhost} {admin_username} \".*\" \".*\" \".*\"')\n    return params"
},
{
"code": "def str_cmd(cmd, cwd, env):\n    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,\n                               stderr=subprocess.PIPE, cwd=cwd, env=env)\n    stdout_builder, stderr_builder = proc.async_stdout_stderr_builder(process)\n    process.wait()\n    stdout, stderr = stdout_builder.result(), stderr_builder.result()\n    return {'command': ' '.join(cmd), 'stderr': stderr, 'stdout': stdout}"
},
{
"code": "def filter_dict(unfiltered, filter_keys):\n    filtered = DotDict()\n    for k in filter_keys:\n        filtered[k] = unfiltered[k]\n    return filtered"
},
{
"code": "def validate(self):\n    warnings.warn(\n        'Property \"package.validate\" is deprecated.',\n        UserWarning)\n    descriptor = self.to_dict()\n    self.profile.validate(descriptor)"
},
{
"code": "def _cleanup_and_die(data):\n    tmpfiles = glob.glob(os.path.join(data.dirs.fastqs, \"tmp_*_R*.fastq\"))\n    tmpfiles += glob.glob(os.path.join(data.dirs.fastqs, \"tmp_*.p\"))\n    for tmpf in tmpfiles:\n        os.remove(tmpf)"
},
{
"code": "def run(cmd, **kw):\n    kw = kw.copy()\n    kw.setdefault('warn', False)\n    report_error = kw.pop('report_error', True)\n    runner = kw.pop('runner', invoke_run)\n    try:\n        return runner(cmd, **kw)\n    except exceptions.Failure as exc:\n        sys.stdout.flush()\n        sys.stderr.flush()\n        if report_error:\n            notify.error(\"Command `{}` failed with RC={}!\".format(cmd, exc.result.return_code))\n        raise\n    finally:\n        sys.stdout.flush()\n        sys.stderr.flush()"
},
{
"code": "def expand_args(command):\n    if isinstance(command, (str, unicode)):\n        splitter = shlex.shlex(command.encode('utf-8'))\n        splitter.whitespace = '|'\n        splitter.whitespace_split = True\n        command = []\n        while True:\n            token = splitter.get_token()\n            if token:\n                command.append(token)\n            else:\n                break\n        command = list(map(shlex.split, command))\n    return command"
},
{
"code": "def next_item(self):\n    queue = self.queue\n    try:\n        item = queue.get(block=True, timeout=5)\n        return item\n    except Exception:\n        return None"
},
{
"code": "def rem_active_module(module):\n    modules = set(get_active_modules())\n    modules.discard(module)\n    new_modules_path = os.pathsep.join([m.path for m in modules])\n    os.environ['CPENV_ACTIVE_MODULES'] = str(new_modules_path)"
},
{
"code": "def parse(filename):\n    for event, elt in et.iterparse(filename, events=('start', 'end', 'comment', 'pi'), huge_tree=True):\n        if event == 'start':\n            obj = _elt2obj(elt)\n            obj['type'] = ENTER\n            yield obj\n            if elt.text:\n                yield {'type': TEXT, 'text': elt.text}\n        elif event == 'end':\n            yield {'type': EXIT}\n            if elt.tail:\n                yield {'type': TEXT, 'text': elt.tail}\n            elt.clear()\n        elif event == 'comment':\n            yield {'type': COMMENT, 'text': elt.text}\n        elif event == 'pi':\n            yield {'type': PI, 'text': elt.text}\n        else:\n            assert False, (event, elt)"
},
{
"code": "def showfig(fig, aspect=\"auto\"):\n    ax = fig.gca()\n    alim = list(ax.axis())\n    if alim[3] < alim[2]:\n        alim[2], alim[3] = alim[3], alim[2]\n    ax.axis(alim)\n    ax.set_aspect(aspect)\n    fig.show()"
},
{
"code": "def _echo_setting(key):\n    value = getattr(settings, key)\n    secho('%s: ' % key, fg='magenta', bold=True, nl=False)\n    secho(\n        six.text_type(value),\n        bold=True,\n        fg='white' if isinstance(value, six.text_type) else 'cyan',\n    )"
},
{
"code": "def _destroy_image_acquirer(self, ia):\n    id_ = None\n    if ia.device:\n        ia.stop_image_acquisition()\n        ia._release_data_streams()\n        id_ = ia._device.id_\n        if ia.device.node_map:\n            if ia._chunk_adapter:\n                ia._chunk_adapter.detach_buffer()\n                ia._chunk_adapter = None\n                self._logger.info(\n                    'Detached a buffer from the chunk adapter of {0}.'.format(\n                        id_\n                    )\n                )\n            ia.device.node_map.disconnect()\n            self._logger.info(\n                'Disconnected the port from the NodeMap of {0}.'.format(\n                    id_\n                )\n            )\n        if ia._device.is_open():\n            ia._device.close()\n            self._logger.info(\n                'Closed Device module, {0}.'.format(id_)\n            )\n    ia._device = None\n    if id_:\n        self._logger.info(\n            'Destroyed the ImageAcquirer object which {0} '\n            'had belonged to.'.format(id_)\n        )\n    else:\n        self._logger.info(\n            'Destroyed an ImageAcquirer.'\n        )\n    if self._profiler:\n        self._profiler.print_diff()\n    self._ias.remove(ia)"
},
{
"code": "def _fmt_args_kwargs(self, *some_args, **some_kwargs):\n    if some_args:\n        out_args = str(some_args).lstrip('(').rstrip(',)')\n    if some_kwargs:\n        out_kwargs = ', '.join([str(i).lstrip('(').rstrip(')').replace(', ', ': ') for i in [\n            (k, some_kwargs[k]) for k in sorted(some_kwargs.keys())]])\n    if some_args and some_kwargs:\n        return out_args + ', ' + out_kwargs\n    elif some_args:\n        return out_args\n    elif some_kwargs:\n        return out_kwargs\n    else:\n        return ''"
},
{
"code": "def pad_cells(table):\n    col_sizes = [max(map(len, col)) for col in zip(*table)]\n    for row in table:\n        for cell_num, cell in enumerate(row):\n            row[cell_num] = pad_to(cell, col_sizes[cell_num])\n    return table"
},
{
"code": "def _add_document(self, doc_id, conn=None, nosave=False, score=1.0, payload=None,\n                  replace=False, partial=False, language=None, **fields):\n    if conn is None:\n        conn = self.redis\n    if partial:\n        replace = True\n    args = [self.ADD_CMD, self.index_name, doc_id, score]\n    if nosave:\n        args.append('NOSAVE')\n    if payload is not None:\n        args.append('PAYLOAD')\n        args.append(payload)\n    if replace:\n        args.append('REPLACE')\n        if partial:\n            args.append('PARTIAL')\n    if language:\n        args += ['LANGUAGE', language]\n    args.append('FIELDS')\n    args += list(itertools.chain(*fields.items()))\n    return conn.execute_command(*args)"
},
{
"code": "def exploit_single(self, ip, operating_system):\n    result = None\n    if \"Windows Server 2008\" in operating_system or \"Windows 7\" in operating_system:\n        result = subprocess.run(['python2', os.path.join(self.datadir, 'MS17-010', 'eternalblue_exploit7.py'), str(ip), os.path.join(self.datadir, 'final_combined.bin'), \"12\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n    elif \"Windows Server 2012\" in operating_system or \"Windows 10\" in operating_system or \"Windows 8.1\" in operating_system:\n        result = subprocess.run(['python2', os.path.join(self.datadir, 'MS17-010', 'eternalblue_exploit8.py'), str(ip), os.path.join(self.datadir, 'final_combined.bin'), \"12\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n    else:\n        return [\"System target could not be automatically identified\"]\n    return result.stdout.decode('utf-8').split('\\n')"
},
{
"code": "def commit(self, *args, **kwargs):\n    return super(Deposit, self).commit(*args, **kwargs)"
},
{
"code": "def ternary_operation(x):\n    g = tf.get_default_graph()\n    with g.gradient_override_map({\"Sign\": \"Identity\"}):\n        threshold = _compute_threshold(x)\n        x = tf.sign(tf.add(tf.sign(tf.add(x, threshold)), tf.sign(tf.add(x, -threshold))))\n        return x"
},
{
"code": "def centralManager_didDisconnectPeripheral_error_(self, manager, peripheral, error):\n    logger.debug('centralManager_didDisconnectPeripheral called')\n    device = device_list().get(peripheral)\n    if device is not None:\n        device._set_disconnected()\n        device_list().remove(peripheral)"
},
{
"code": "def track_enrollment(pathway, user_id, course_run_id, url_path=None):\n    track_event(user_id, 'edx.bi.user.enterprise.onboarding', {\n        'pathway': pathway,\n        'url_path': url_path,\n        'course_run_id': course_run_id,\n    })"
},
{
"code": "def addInstance(self, groundTruth, prediction, record=None, result=None):\n    self.value = self.avg(prediction)"
},
{
"code": "def keypress(self, size, key):\n    key = super().keypress(size, key)\n    num_tabs = len(self._widgets)\n    if key == self._keys['prev_tab']:\n        self._tab_index = (self._tab_index - 1) % num_tabs\n        self._update_tabs()\n    elif key == self._keys['next_tab']:\n        self._tab_index = (self._tab_index + 1) % num_tabs\n        self._update_tabs()\n    elif key == self._keys['close_tab']:\n        if self._tab_index > 0:\n            curr_tab = self._widgets[self._tab_index]\n            self._widgets.remove(curr_tab)\n            del self._widget_title[curr_tab]\n            self._tab_index -= 1\n            self._update_tabs()\n    else:\n        return key"
},
{
"code": "def send(self, dispatcher):\n    if self.sent_complete:\n        return\n    sent = dispatcher.send(self.to_send)\n    self.to_send = self.to_send[sent:]"
},
{
"code": "def _get_prepare_env(self, script, job_descriptor, inputs, outputs, mounts):\n    docker_paths = sorted([\n        var.docker_path if var.recursive else os.path.dirname(var.docker_path)\n        for var in inputs | outputs | mounts\n        if var.value\n    ])\n    env = {\n        _SCRIPT_VARNAME: repr(script.value),\n        _META_YAML_VARNAME: repr(job_descriptor.to_yaml()),\n        'DIR_COUNT': str(len(docker_paths))\n    }\n    for idx, path in enumerate(docker_paths):\n        env['DIR_{}'.format(idx)] = os.path.join(providers_util.DATA_MOUNT_POINT,\n                                                 path)\n    return env"
},
{
"code": "def _compute(self, inputs, outputs):\n    if self._sfdr is None:\n        raise RuntimeError(\"Spatial pooler has not been initialized\")\n    if not self.topDownMode:\n        self._iterations += 1\n        buInputVector = inputs['bottomUpIn']\n        resetSignal = False\n        if 'resetIn' in inputs:\n            assert len(inputs['resetIn']) == 1\n            resetSignal = inputs['resetIn'][0] != 0\n        rfOutput = self._doBottomUpCompute(\n            rfInput=buInputVector.reshape((1, buInputVector.size)),\n            resetSignal=resetSignal\n        )\n        outputs['bottomUpOut'][:] = rfOutput.flat\n    else:\n        topDownIn = inputs.get('topDownIn', None)\n        spatialTopDownOut, temporalTopDownOut = self._doTopDownInfer(topDownIn)\n        outputs['spatialTopDownOut'][:] = spatialTopDownOut\n        if temporalTopDownOut is not None:\n            outputs['temporalTopDownOut'][:] = temporalTopDownOut\n    outputs['anomalyScore'][:] = 0"
},
{
"code": "def word_to_id(self, word):\n    if word in self.vocab:\n        return self.vocab[word]\n    else:\n        return self.unk_id"
},
{
"code": "def link_zscale(st):\n    psf = st.get('psf')\n    psf.param_dict['zscale'] = psf.param_dict['psf-zscale']\n    psf.params[psf.params.index('psf-zscale')] = 'zscale'\n    psf.global_zscale = True\n    psf.param_dict.pop('psf-zscale')\n    st.trigger_parameter_change()\n    st.reset()"
},
{
"code": "def _getEphemeralMembers(self):\n    e = BacktrackingTM._getEphemeralMembers(self)\n    if self.makeCells4Ephemeral:\n        e.extend(['cells4'])\n    return e"
},
{
"code": "def check(self):\n    if not self.is_valid:\n        raise PolyaxonDeploymentConfigError(\n            'Deployment type `{}` not supported'.format(self.deployment_type))\n    check = False\n    if self.is_kubernetes:\n        check = self.check_for_kubernetes()\n    elif self.is_docker_compose:\n        check = self.check_for_docker_compose()\n    elif self.is_docker:\n        check = self.check_for_docker()\n    elif self.is_heroku:\n        check = self.check_for_heroku()\n    if not check:\n        raise PolyaxonDeploymentConfigError(\n            'Deployment `{}` is not valid'.format(self.deployment_type))"
},
{
"code": "def write(self, album):\n    page = self.template.render(**self.generate_context(album))\n    output_file = os.path.join(album.dst_path, album.output_file)\n    with open(output_file, 'w', encoding='utf-8') as f:\n        f.write(page)"
},
{
"code": "def _rebuildPartitionIdMap(self, partitionIdList):\n    self._partitionIdMap = {}\n    for row, partitionId in enumerate(partitionIdList):\n        indices = self._partitionIdMap.get(partitionId, [])\n        indices.append(row)\n        self._partitionIdMap[partitionId] = indices"
},
{
"code": "def end(self):\n    return Range(self.source_buffer, self.end_pos, self.end_pos,\n                 expanded_from=self.expanded_from)"
},
{
"code": "def _sample_with_priority(self, p):\n    parent = 0\n    while True:\n        left = 2 * parent + 1\n        if left >= len(self._memory):\n            return parent\n        left_p = self._memory[left] if left < self._capacity - 1 \\\n            else (self._memory[left].priority or 0)\n        if p <= left_p:\n            parent = left\n        else:\n            if left + 1 >= len(self._memory):\n                raise RuntimeError('Right child is expected to exist.')\n            p -= left_p\n            parent = left + 1"
},
{
"code": "def merge_da(self):\n    print(' - Merging D and A timestamps', flush=True)\n    ts_d, ts_par_d = self.S.get_timestamps_part(self.name_timestamps_d)\n    ts_a, ts_par_a = self.S.get_timestamps_part(self.name_timestamps_a)\n    ts, a_ch, part = merge_da(ts_d, ts_par_d, ts_a, ts_par_a)\n    assert a_ch.sum() == ts_a.shape[0]\n    assert (~a_ch).sum() == ts_d.shape[0]\n    assert a_ch.size == ts_a.shape[0] + ts_d.shape[0]\n    self.ts, self.a_ch, self.part = ts, a_ch, part\n    self.clk_p = ts_d.attrs['clk_p']"
},
{
"code": "def _httplib2_init(username, password):\n    obj = httplib2.Http()\n    if username and password:\n        obj.add_credentials(username, password)\n    return obj"
},
{
"code": "def _setVirtualEnv():\n    try:\n        activate = options.virtualenv.activate_cmd\n    except AttributeError:\n        activate = None\n    if activate is None:\n        virtualenv = path(os.environ.get('VIRTUAL_ENV', ''))\n        if not virtualenv:\n            virtualenv = options.paved.cwd\n        else:\n            virtualenv = path(virtualenv)\n        activate = virtualenv / 'bin' / 'activate'\n        if activate.exists():\n            info('Using default virtualenv at %s' % activate)\n            options.setdotted('virtualenv.activate_cmd', 'source %s' % activate)"
},
{
"code": "def settings(self):\n    for table in self.tables:\n        if isinstance(table, SettingTable):\n            for statement in table.statements:\n                yield statement"
},
{
"code": "def on_new(self):\n    interpreter, pyserver, args = self._get_backend_parameters()\n    self.setup_editor(self.tabWidget.create_new_document(\n        extension='.py', interpreter=interpreter, server_script=pyserver,\n        args=args))\n    self.actionRun.setDisabled(True)\n    self.actionConfigure_run.setDisabled(True)"
},
{
"code": "def build_sdist(sdist_directory, config_settings):\n    backend = _build_backend()\n    try:\n        return backend.build_sdist(sdist_directory, config_settings)\n    except getattr(backend, 'UnsupportedOperation', _DummyException):\n        raise GotUnsupportedOperation(traceback.format_exc())"
},
{
| "code": "def from_separate(cls, meta: ProgramDescription, vertex_source, geometry_source=None, fragment_source=None,\r\n tess_control_source=None, tess_evaluation_source=None):\r\n instance = cls(meta)\r\n instance.vertex_source = ShaderSource(\r\n VERTEX_SHADER,\r\n meta.path or meta.vertex_shader,\r\n vertex_source,\r\n )\r\n if geometry_source:\r\n instance.geometry_source = ShaderSource(\r\n GEOMETRY_SHADER,\r\n meta.path or meta.geometry_shader,\r\n geometry_source,\r\n )\r\n if fragment_source:\r\n instance.fragment_source = ShaderSource(\r\n FRAGMENT_SHADER,\r\n meta.path or meta.fragment_shader,\r\n fragment_source,\r\n )\r\n if tess_control_source:\r\n instance.tess_control_source = ShaderSource(\r\n TESS_CONTROL_SHADER,\r\n meta.path or meta.tess_control_shader,\r\n tess_control_source,\r\n )\r\n if tess_evaluation_source:\r\n instance.tess_evaluation_source = ShaderSource(\r\n TESS_EVALUATION_SHADER,\r\n meta.path or meta.tess_control_shader,\r\n tess_evaluation_source,\r\n )\r\n return instance" | |
},
{
"code": "def format_objects(objects, children=False, columns=None, header=True):\n    columns = columns or ('NAME', 'TYPE', 'PATH')\n    objects = sorted(objects, key=_type_and_name)\n    data = []\n    for obj in objects:\n        if isinstance(obj, cpenv.VirtualEnvironment):\n            data.append(get_info(obj))\n            modules = obj.get_modules()\n            if children and modules:\n                for mod in modules:\n                    data.append(get_info(mod, indent=2, root=obj.path))\n        else:\n            data.append(get_info(obj))\n    maxes = [len(max(col, key=len)) for col in zip(*data)]\n    tmpl = '{:%d} {:%d} {:%d}' % tuple(maxes)\n    lines = []\n    if header:\n        lines.append('\\n' + bold_blue(tmpl.format(*columns)))\n    for obj_data in data:\n        lines.append(tmpl.format(*obj_data))\n    return '\\n'.join(lines)"
},
{
"code": "def index():\n    page = request.args.get('page', 1, type=int)\n    per_page = request.args.get('per_page', 5, type=int)\n    q = request.args.get('q', '')\n    groups = Group.query_by_user(current_user, eager=True)\n    if q:\n        groups = Group.search(groups, q)\n    groups = groups.paginate(page, per_page=per_page)\n    requests = Membership.query_requests(current_user).count()\n    invitations = Membership.query_invitations(current_user).count()\n    return render_template(\n        'invenio_groups/index.html',\n        groups=groups,\n        requests=requests,\n        invitations=invitations,\n        page=page,\n        per_page=per_page,\n        q=q\n    )"
},
{
"code": "def _get_contour_values(min_val, max_val, base=0, interval=100):\n    i = base\n    out = []\n    if min_val < base:\n        while i >= min_val:\n            i -= interval\n    while i <= max_val:\n        if i >= min_val:\n            out.append(i)\n        i += interval\n    return out"
},
{
"code": "def intersection(self, i):\n    if self.intersects(i):\n        return Interval(max(self.start, i.start), min(self.end, i.end))\n    else:\n        return None"
},
{
"code": "def wait_for_page_to_load(self):\n    self.wait.until(lambda _: self.loaded)\n    self.pm.hook.pypom_after_wait_for_page_to_load(page=self)\n    return self"
},
{
"code": "def with_access_to(self, request, *args, **kwargs):\n    self.queryset = self.queryset.order_by('name')\n    enterprise_id = self.request.query_params.get('enterprise_id', None)\n    enterprise_slug = self.request.query_params.get('enterprise_slug', None)\n    enterprise_name = self.request.query_params.get('search', None)\n    if enterprise_id is not None:\n        self.queryset = self.queryset.filter(uuid=enterprise_id)\n    elif enterprise_slug is not None:\n        self.queryset = self.queryset.filter(slug=enterprise_slug)\n    elif enterprise_name is not None:\n        self.queryset = self.queryset.filter(name__icontains=enterprise_name)\n    return self.list(request, *args, **kwargs)"
},
{
"code": "def push(self, ip_packet):\n    data_len = len(ip_packet.data.data)\n    seq_id = ip_packet.data.seq\n    if data_len == 0:\n        self._next_seq_id = seq_id\n        return False\n    if self._next_seq_id != -1 and seq_id != self._next_seq_id:\n        return False\n    self._next_seq_id = seq_id + data_len\n    with self._lock_packets:\n        self._length += len(ip_packet.data.data)\n        self._remaining += len(ip_packet.data.data)\n        self._packets.append(ip_packet)\n    return True"
},
{
"code": "def do_toggle_variables(self, action):\n    self.show_vars = action.get_active()\n    if self.show_vars:\n        self.show_variables_window()\n    else:\n        self.hide_variables_window()"
},
{
"code": "def _validate_ram(ram_in_mb):\n    return int(GoogleV2CustomMachine._MEMORY_MULTIPLE * math.ceil(\n        ram_in_mb / GoogleV2CustomMachine._MEMORY_MULTIPLE))"
},
{
"code": "def _arg_parser():\n    description = \"Converts a completezip to a litezip\"\n    parser = argparse.ArgumentParser(description=description)\n    verbose_group = parser.add_mutually_exclusive_group()\n    verbose_group.add_argument(\n        '-v', '--verbose', action='store_true',\n        dest='verbose', default=None,\n        help=\"increase verbosity\")\n    verbose_group.add_argument(\n        '-q', '--quiet', action='store_false',\n        dest='verbose', default=None,\n        help=\"print nothing to stdout or stderr\")\n    parser.add_argument(\n        'location',\n        help=\"Location of the unpacked litezip\")\n    return parser"
},
{
"code": "def with_logger(cls):\n    attr_name = '_logger'\n    cls_name = cls.__qualname__\n    module = cls.__module__\n    if module is not None:\n        cls_name = module + '.' + cls_name\n    else:\n        raise AssertionError\n    setattr(cls, attr_name, logging.getLogger(cls_name))\n    return cls"
},
{
"code": "def _f_gene(sid, prefix=\"G_\"):\n    sid = sid.replace(SBML_DOT, \".\")\n    return _clip(sid, prefix)"
},
{
"code": "def getInstanceJstack(self, topology_info, instance_id):\n    pid_response = yield getInstancePid(topology_info, instance_id)\n    try:\n        http_client = tornado.httpclient.AsyncHTTPClient()\n        pid_json = json.loads(pid_response)\n        pid = pid_json['stdout'].strip()\n        if pid == '':\n            raise Exception('Failed to get pid')\n        endpoint = utils.make_shell_endpoint(topology_info, instance_id)\n        url = \"%s/jstack/%s\" % (endpoint, pid)\n        response = yield http_client.fetch(url)\n        Log.debug(\"HTTP call for url: %s\", url)\n        raise tornado.gen.Return(response.body)\n    except tornado.httpclient.HTTPError as e:\n        raise Exception(str(e))"
},
{
"code": "def answer_display(self, s=''):\n    padding = len(max(self.questions.keys(), key=len)) + 5\n    for key in list(self.answers.keys()):\n        s += '{:>{}} : {}\\n'.format(key, padding, self.answers[key])\n    return s"
},
{
| "code": "def pickle_save(thing,fname):\n pickle.dump(thing, open(fname,\"wb\"),pickle.HIGHEST_PROTOCOL)\n return thing" | |
},
{
"code": "def utime(self, *args, **kwargs):\n    os.utime(self.extended_path, *args, **kwargs)"
},
{
"code": "def map_memory_callback(self, address, size, perms, name, offset, result):\n    logger.info(' '.join((\"Mapping Memory @\",\n                          hex(address) if type(address) is int else \"0x??\",\n                          hr_size(size), \"-\",\n                          perms, \"-\",\n                          f\"{name}:{hex(offset) if name else ''}\", \"->\",\n                          hex(result))))\n    self._emu.mem_map(address, size, convert_permissions(perms))\n    self.copy_memory(address, size)"
},
{
"code": "def tool_factory(clsname, name, driver, base=GromacsCommand):\n    clsdict = {\n        'command_name': name,\n        'driver': driver,\n        '__doc__': property(base._get_gmx_docs)\n    }\n    return type(clsname, (base,), clsdict)"
},
{
| "code": "def _xml(self, root):\n element = root.createElement(self.name)\n keys = self.attrs.keys()\n keys.sort()\n for a in keys:\n element.setAttribute(a, self.attrs[a])\n if self.body:\n text = root.createTextNode(self.body)\n element.appendChild(text)\n for c in self.elements:\n element.appendChild(c._xml(root))\n return element" | |
},
{
"code": "def operation_list(uploader):\n    files = uploader.file_list()\n    for f in files:\n        log.info(\"{file:30s} {size}\".format(file=f[0], size=f[1]))"
},
{
"code": "def jobCancelAllRunningJobs(self):\n    with ConnectionFactory.get() as conn:\n        query = 'UPDATE %s SET cancel=TRUE WHERE status<>%%s ' \\\n            % (self.jobsTableName,)\n        conn.cursor.execute(query, [self.STATUS_COMPLETED])\n    return"
},
{
"code": "def union(self, i):\n    if self.intersects(i) or self.end + 1 == i.start or i.end + 1 == self.start:\n        return Interval(min(self.start, i.start), max(self.end, i.end))\n    else:\n        return None"
},
{
"code": "def clear_obj(self, obj):\n    obj_nodes = self.get_nodes_with(obj)\n    removed = set()\n    for node in obj_nodes:\n        if self.has_node(node):\n            removed.update(self.clear_descendants(node))\n    return removed"
},
{
"code": "def terminate(self):\n    if self._pool is not None:\n        self._pool.terminate()\n        self._pool.join()\n        self._pool = None"
},
{
"code": "def tex_parse(string):\n    string = string.replace('{', '').replace('}', '')\n    def tex_replace(match):\n        return \\\n            sub(r'\\^(\\w)', r'<sup>\\1</sup>',\n            sub(r'\\^\\{(.*?)\\}', r'<sup>\\1</sup>',\n            sub(r'\\_(\\w)', r'<sub>\\1</sub>',\n            sub(r'\\_\\{(.*?)\\}', r'<sub>\\1</sub>',\n            sub(r'\\\\(' + GREEK_LETTERS + ')', r'&\\1;', match.group(1))))))\n    return mark_safe(sub(r'\\$([^\\$]*)\\$', tex_replace, escape(string)))"
},
{
"code": "def create_tar(tar_filename, files, config_dir, config_files):\n    with contextlib.closing(tarfile.open(tar_filename, 'w:gz', dereference=True)) as tar:\n        for filename in files:\n            if os.path.isfile(filename):\n                tar.add(filename, arcname=os.path.basename(filename))\n            else:\n                raise Exception(\"%s is not an existing file\" % filename)\n        if os.path.isdir(config_dir):\n            tar.add(config_dir, arcname=get_heron_sandbox_conf_dir())\n        else:\n            raise Exception(\"%s is not an existing directory\" % config_dir)\n        for filename in config_files:\n            if os.path.isfile(filename):\n                arcfile = os.path.join(get_heron_sandbox_conf_dir(), os.path.basename(filename))\n                tar.add(filename, arcname=arcfile)\n            else:\n                raise Exception(\"%s is not an existing file\" % filename)"
},
{
| "code": "def _load(self, config):\n if isinstance(config, six.string_types):\n try:\n config = json.loads(config)\n except ValueError:\n pass\n if not isinstance(config, dict):\n raise TypeError('config block must be an istance '\n 'of dict or a valid NetJSON string')\n return config" | |
},
{
"code": "def start(self):\n    self.__thread = Thread(target=self.__run, args=(True, False))\n    self.__thread.setDaemon(True)\n    self.__thread.start()"
},
{
"code": "def parse_log_messages(self, text):\n    regex = r\"commit ([0-9a-f]+)\\nAuthor: (.*?)\\n\\n(.*?)(?:\\n\\n|$)\"\n    messages = re.findall(regex, text, re.DOTALL)\n    parsed = []\n    for commit, author, message in messages:\n        parsed.append((\n            commit[:10],\n            re.sub(r\"\\s*<.*?>\", \"\", author),\n            message.strip()\n        ))\n    return parsed"
},
{
"code": "def shuffle_cols(seqarr, newarr, cols):\n    for idx in xrange(cols.shape[0]):\n        newarr[:, idx] = seqarr[:, cols[idx]]\n    return newarr"
},
{
"code": "def variables(self):\n    for table in self.tables:\n        if isinstance(table, VariableTable):\n            for statement in table.rows:\n                if statement[0] != \"\":\n                    yield statement"
},
{
"code": "def empty_like(array, dtype=None):\n    array = numpy.asarray(array)\n    if dtype is None:\n        dtype = array.dtype\n    return anonymousmemmap(array.shape, dtype)"
},
{
| "code": "def handle(self):\n while True:\n try:\n line = self.rfile.readline()\n try:\n cmd = json.loads(line)\n except Exception, exc:\n self.wfile.write(repr(exc) + NEWLINE)\n continue\n else:\n handler = getattr(self, 'handle_' + cmd[0], None)\n if not handler:\n self.wfile.write(\n repr(CommandNotFound(cmd[0])) + NEWLINE)\n continue\n return_value = handler(*cmd[1:])\n if not return_value:\n continue\n self.wfile.write(\n one_lineify(json.dumps(return_value)) + NEWLINE)\n except Exception, exc:\n self.wfile.write(repr(exc) + NEWLINE)\n continue" | |
},
{
"code": "def build_index_and_mapping(triples):\n    ents = bidict()\n    rels = bidict()\n    ent_id = 0\n    rel_id = 0\n    collected = []\n    for t in triples:\n        for e in (t.head, t.tail):\n            if e not in ents:\n                ents[e] = ent_id\n                ent_id += 1\n        if t.relation not in rels:\n            rels[t.relation] = rel_id\n            rel_id += 1\n        collected.append(kgedata.TripleIndex(ents[t.head], rels[t.relation], ents[t.tail]))\n    return collected, ents, rels"
},
{
"code": "def file(self, item, **kwargs):\n    query_string = \"/{t}/{u}/items/{i}/file\".format(\n        u=self.library_id, t=self.library_type, i=item.upper()\n    )\n    return self._build_query(query_string, no_params=True)"
},
{
"code": "def prohibit(self, data):\n    for char in data:\n        for lookup in self.prohibited:\n            if lookup(char):\n                raise StringprepError(\"Prohibited character: {0!r}\"\n                                      .format(char))\n    return data"
},
{
"code": "def _read_header(self, data):\n    version = self._read_version(data)\n    version_info = self._get_version_info(version)\n    header_data = data[:version_info['header_size']]\n    header = version_info['header']\n    header = header._make(\n        unpack(version_info['header_format'], header_data))\n    header = dict(header._asdict())\n    flags = list(\"{0:0>8b}\".format(header['flags']))\n    flags = dict(version_info['flags']._make(flags)._asdict())\n    flags = dict((i, bool(int(j))) for i, j in flags.iteritems())\n    header['flags'] = flags\n    timestamp = None\n    if flags['timestamp']:\n        ts_start = version_info['header_size']\n        ts_end = ts_start + version_info['timestamp_size']\n        timestamp_data = data[ts_start:ts_end]\n        timestamp = unpack(\n            version_info['timestamp_format'], timestamp_data)[0]\n    header['info'] = {'timestamp': timestamp}\n    return header"
},
{
"code": "def fill_dups_arr(data):\n    duplefiles = glob.glob(os.path.join(data.tmpdir, \"duples_*.tmp.npy\"))\n    duplefiles.sort(key=lambda x: int(x.rsplit(\"_\", 1)[-1][:-8]))\n    io5 = h5py.File(data.clust_database, 'r+')\n    dfilter = io5[\"duplicates\"]\n    init = 0\n    for dupf in duplefiles:\n        end = int(dupf.rsplit(\"_\", 1)[-1][:-8])\n        inarr = np.load(dupf)\n        dfilter[init:end] = inarr\n        init += end - init\n    LOGGER.info(\"all duplicates: %s\", dfilter[:].sum())\n    io5.close()"
},
{
"code": "def _pop(self, model):\n    tags = []\n    for tag in model.tags:\n        if self.is_tag(tag):\n            tags.append(tag)\n    if tags:\n        for tag in tags:\n            model.tags.remove(tag)\n    return tags"
},
{
"code": "def lookup_api_key_info():\n    info = {}\n    with db_connect() as conn:\n        with conn.cursor() as cursor:\n            cursor.execute(ALL_KEY_INFO_SQL_STMT)\n            for row in cursor.fetchall():\n                id, key, name, groups = row\n                user_id = \"api_key:{}\".format(id)\n                info[key] = dict(id=id, user_id=user_id,\n                                 name=name, groups=groups)\n    return info"
},
{
"code": "def _get_base_component(self):\n    comp = topology_pb2.Component()\n    comp.name = self.name\n    comp.spec = topology_pb2.ComponentObjectSpec.Value(\"PYTHON_CLASS_NAME\")\n    comp.class_name = self.python_class_path\n    comp.config.CopyFrom(self._get_comp_config())\n    return comp"
}
]